From 621d9a949a57a9100b7f2d1465ebd32aaeddb05c Mon Sep 17 00:00:00 2001 From: jkarres Date: Sun, 5 Apr 2020 00:06:41 -0700 Subject: [PATCH 001/632] typo fix --- docs/source/nodes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/nodes.rst b/docs/source/nodes.rst index 21965a91..62e206cc 100644 --- a/docs/source/nodes.rst +++ b/docs/source/nodes.rst @@ -3,7 +3,7 @@ Nodes ===== -CSTNode and it's subclasses cover Python's full grammar in a +CSTNode and its subclasses cover Python's full grammar in a whitespace-sensitive fashion, forming LibCST's concrete syntax tree. Many of these nodes are designed to `behave similarly to Python's abstract From dc782f116a162d99410bec72776921757e662a87 Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Thu, 9 Apr 2020 00:51:18 -0400 Subject: [PATCH 002/632] Fix "Too many open files" errors when running on large repo on OSX It appears we're running out of file handles when running on large repo on OSX due to waiting until all files have been processed to join/close the subprocesses. This PR joins/closes them as they finish, resolving the issue. ``` $ python3 -m libcst.tool codemod {codemod_name} {direectory} Calculating full-repo metadata... Executing codemod... Codemodding {file} Traceback (most recent call last): File "/Users/rwilliams/src/go/src/github.com/lyft/python-lyft-ingest/venv/lib/python3.6/site-packages/libcst/codemod/_cli.py", line 253, in _parallel_exec_process_stub OSError: [Errno 24] Too many open files: '{file}' Failed to codemod {file} 19.80s 24% complete, 01m 02s estimated for 741 files to go...Traceback (most recent call last): File "/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) File "/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/runpy.py", line 85, in _run_code exec(code, run_globals) File "/Users/rwilliams/src/go/src/github.com/lyft/python-lyft-ingest/venv/lib/python3.6/site-packages/libcst/tool.py", line 833, in main(os.environ.get("LIBCST_TOOL_COMMAND_NAME", "libcst.tool"), sys.argv[1:]) File "/Users/rwilliams/src/go/src/github.com/lyft/python-lyft-ingest/venv/lib/python3.6/site-packages/libcst/tool.py", line 828, in main return lookup.get(args.action or None, _invalid_command)(proc_name, command_args) File "/Users/rwilliams/src/go/src/github.com/lyft/python-lyft-ingest/venv/lib/python3.6/site-packages/libcst/tool.py", line 581, in _codemod_impl repo_root=config["repo_root"], File "/Users/rwilliams/src/go/src/github.com/lyft/python-lyft-ingest/venv/lib/python3.6/site-packages/libcst/codemod/_cli.py", line 720, in parallel_exec_transform_with_prettyprint process.start() File "/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/multiprocessing/process.py", line 105, in start self._popen = self._Popen(self) File "/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/multiprocessing/context.py", line 223, in _Popen return _default_context.get_context().Process._Popen(process_obj) File "/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/multiprocessing/context.py", line 277, in _Popen return Popen(process_obj) File "/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/multiprocessing/popen_fork.py", line 19, in __init__ self._launch(process_obj) File 
"/opt/lyft/brew/Cellar/python36/3.6.5_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/multiprocessing/popen_fork.py", line 65, in _launch parent_r, child_w = os.pipe() OSError: [Errno 24] Too many open files ``` --- libcst/codemod/_cli.py | 48 ++++++++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index a9c9e81a..1d513c3c 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -19,7 +19,7 @@ from dataclasses import dataclass, replace from multiprocessing import Process, Queue, cpu_count from pathlib import Path from queue import Empty -from typing import AnyStr, List, Optional, Sequence, Union, cast +from typing import AnyStr, Dict, List, Optional, Sequence, Set, Union, cast from libcst import PartialParserConfig, parse_module from libcst.codemod._codemod import Codemod @@ -682,30 +682,31 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 pending_processes: List[Process] = [] # Start processes + filename_to_process: Dict[str, Process] = {} for f in files: - pending_processes.append( - Process( - target=_parallel_exec_process_stub, - args=( - queue, - transform, - f, - repo_root, - unified_diff, - include_generated, - generated_code_marker, - format_code, - formatter_args, - blacklist_patterns, - python_version, - ), - ) + process = Process( + target=_parallel_exec_process_stub, + args=( + queue, + transform, + f, + repo_root, + unified_diff, + include_generated, + generated_code_marker, + format_code, + formatter_args, + blacklist_patterns, + python_version, + ), ) + pending_processes.append(process) + filename_to_process[f] = process # Start the processes, allowing no more than num_processes to be running # at once. results_left = len(pending_processes) - joinable_processes: List[Process] = [] + joinable_processes: Set[Process] = set() processes_started = 0 interrupted = False @@ -714,7 +715,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 try: # Move this process to the joinables process = pending_processes.pop(0) - joinable_processes.append(process) + joinable_processes.add(process) # Start it, bookkeep that we did process.start() @@ -755,6 +756,13 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 skips += 1 warnings += len(result.transform_result.warning_messages) + process = filename_to_process.get(result.filename) + if process: + # Join and close the process to free any related file handles. + process.join() + process.close() + if process in joinable_processes: + joinable_processes.remove(process) # Now, join on all of them so we don't leave zombies or hang for p in joinable_processes: From 834b5b989e69da24c60740f855d0e95da900e2ca Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Thu, 9 Apr 2020 01:23:47 -0400 Subject: [PATCH 003/632] Support <3.7 --- libcst/codemod/_cli.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 1d513c3c..7eccd52e 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -756,11 +756,13 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 skips += 1 warnings += len(result.transform_result.warning_messages) - process = filename_to_process.get(result.filename) + + # Join the process to free any related resources. + # Remove all references to the process to allow the GC to + # clean up any file handles. 
+ process = filename_to_process.pop(result.filename, None) if process: - # Join and close the process to free any related file handles. process.join() - process.close() if process in joinable_processes: joinable_processes.remove(process) From 30cb9f3c6a0febf567e65d1a40ed89ce28150866 Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Thu, 9 Apr 2020 10:18:44 -0400 Subject: [PATCH 004/632] s/remove/discard --- libcst/codemod/_cli.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 7eccd52e..39a3ff27 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -763,8 +763,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 process = filename_to_process.pop(result.filename, None) if process: process.join() - if process in joinable_processes: - joinable_processes.remove(process) + joinable_processes.discard(process) # Now, join on all of them so we don't leave zombies or hang for p in joinable_processes: From 477a03e3a822fe9b87ef8d891351192325837818 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 21 Apr 2020 10:27:47 +0100 Subject: [PATCH 005/632] [ScopeProvider] Expose more granular Assignments and Accesses for dotted imports (#284) --- docs/source/metadata.rst | 15 +++- .../tests/test_remove_unused_imports.py | 29 +++++++ libcst/codemod/visitors/_remove_imports.py | 30 ++++--- .../visitors/tests/test_remove_imports.py | 7 ++ libcst/metadata/scope_provider.py | 84 +++++++++++++------ libcst/metadata/tests/test_scope_provider.py | 71 ++++++++++++---- 6 files changed, 175 insertions(+), 61 deletions(-) diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index 5f0df280..f6c9c078 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -141,13 +141,26 @@ There are four different type of scope in Python: :class:`~libcst.metadata.ComprehensionScope`. .. image:: _static/img/python_scopes.png - :alt: LibCST + :alt: Diagram showing how the above 4 scopes are nested in each other :width: 400 :align: center LibCST allows you to inspect these scopes to see what local variables are assigned or accessed within. +.. note:: + Import statements bring new symbols into scope that are declared in other files. + As such, they are represented by :class:`~libcst.metadata.Assignment` for scope + analysis purposes. Dotted imports (e.g. ``import a.b.c``) generate multiple + :class:`~libcst.metadata.Assignment` objects — one for each module. When analyzing + references, only the most specific access is recorded. + + For example, the above ``import a.b.c`` statement generates three + :class:`~libcst.metadata.Assignment` objects: one for ``a``, one for ``a.b``, and + one for ``a.b.c``. A reference for ``a.b.c`` records an access only for the last + assignment, while a reference for ``a.d`` only records an access for the + :class:`~libcst.metadata.Assignment` representing ``a``. + .. 
autoclass:: libcst.metadata.ScopeProvider :no-undoc-members: diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index 08204358..8c500e0b 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -48,3 +48,32 @@ class RemoveUnusedImportsCommandTest(CodemodTest): x: a = 1 """ self.assertCodemod(before, before) + + def test_dotted_imports(self) -> None: + before = """ + import a.b, a.b.c + import e.f + import g.h + import x.y, x.y.z + + def foo() -> None: + a.b + e.g + g.h.i + x.y.z + """ + + after = """ + import a.b, a.b.c + import e.f + import g.h + import x.y.z + + def foo() -> None: + a.b + e.g + g.h.i + x.y.z + """ + + self.assertCodemod(before, after) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 0023bd22..841812c4 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -12,6 +12,7 @@ from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor from libcst.codemod.visitors._gather_exports import GatherExportsVisitor from libcst.helpers import get_absolute_module_for_import, get_full_name_for_node from libcst.metadata import Assignment, Scope, ScopeProvider +from libcst.metadata.scope_provider import _gen_dotted_names class RemovedNodeVisitor(ContextAwareVisitor): @@ -295,24 +296,21 @@ class RemoveImportsVisitor(ContextAwareTransformer): def _is_in_use(self, scope: Scope, alias: cst.ImportAlias) -> bool: # Grab the string name of this alias from the point of view of this module. asname = alias.asname - if asname is not None: - name_node = asname.name - else: - name_node = alias.name - while isinstance(name_node, cst.Attribute): - name_node = name_node.value - name_or_alias = cst.ensure_type(name_node, cst.Name).value + names = _gen_dotted_names( + cst.ensure_type(asname.name, cst.Name) if asname is not None else alias.name + ) - if name_or_alias in self.exported_objects: - return True - - for assignment in scope[name_or_alias]: - if ( - isinstance(assignment, Assignment) - and isinstance(assignment.node, (cst.ImportFrom, cst.Import)) - and len(assignment.references) > 0 - ): + for name_or_alias, _ in names: + if name_or_alias in self.exported_objects: return True + + for assignment in scope[name_or_alias]: + if ( + isinstance(assignment, Assignment) + and isinstance(assignment.node, (cst.ImportFrom, cst.Import)) + and len(assignment.references) > 0 + ): + return True return False def leave_Import( diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index ec8e460c..76c751c6 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -387,21 +387,25 @@ class TestRemoveImportsCodemod(CodemodTest): import baz, qux import a.b import c.d + import x.y.z import e.f as g import h.i as j def foo() -> None: c.d() + x.u j() """ after = """ import bar import qux import c.d + import x.y.z import h.i as j def foo() -> None: c.d() + x.u j() """ @@ -414,6 +418,7 @@ class TestRemoveImportsCodemod(CodemodTest): ("c.d", None, None), ("e.f", None, "g"), ("h.i", None, "j"), + ("x.y.z", None, None), ], ) @@ -428,6 +433,7 @@ class TestRemoveImportsCodemod(CodemodTest): from d.e import f from h.i import j as k from l.m import n as o + from x import * def foo() -> None: f() @@ -437,6 
+443,7 @@ class TestRemoveImportsCodemod(CodemodTest): from bar import qux from d.e import f from h.i import j as k + from x import * def foo() -> None: f() diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index f77ee576..296da70d 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -14,17 +14,20 @@ from enum import Enum, auto from typing import ( Collection, Dict, + Iterable, Iterator, List, Mapping, MutableMapping, Optional, Set, + Tuple, Type, Union, ) import libcst as cst +from libcst import ensure_type from libcst._add_slots import add_slots from libcst._metadata_dependent import MetadataDependent from libcst.helpers import get_full_name_for_node @@ -52,11 +55,14 @@ class Access: ... """ - #: The name node of the access. A name is an access when the expression context is - #: :attr:`ExpressionContext.LOAD`. - node: cst.Name + #: The node of the access. A name is an access when the expression context is + #: :attr:`ExpressionContext.LOAD`. This is usually the name node representing the + #: access, except for dotted imports, when it might be the attribute that + #: represents the most specific part of the imported symbol. + node: Union[cst.Name, cst.Attribute] - #: The scope of the access. Note that a access could be in a child scope of its assignment. + #: The scope of the access. Note that a access could be in a child scope of its + #: assignment. scope: "Scope" __assignments: Set["BaseAssignment"] @@ -584,12 +590,32 @@ class ComprehensionScope(LocalScope): pass +# Generates dotted names from an Attribute or Name node: +# Attribute(value=Name(value="a"), attr=Name(value="b")) -> ("a.b", "a") +# each string has the corresponding CSTNode attached to it +def _gen_dotted_names( + node: Union[cst.Attribute, cst.Name] +) -> Iterable[Tuple[str, Union[cst.Attribute, cst.Name]]]: + if isinstance(node, cst.Name): + yield (node.value, node) + else: + value = node.value + if not isinstance(value, (cst.Attribute, cst.Name)): + raise ValueError(f"Unexpected name value in import: {value}") + name_values = iter(_gen_dotted_names(value)) + (next_name, next_node) = next(name_values) + yield (f"{next_name}.{node.attr.value}", node) + yield (next_name, next_node) + yield from name_values + + class ScopeVisitor(cst.CSTVisitor): # since it's probably not useful. That can makes this visitor cleaner. 
def __init__(self, provider: "ScopeProvider") -> None: self.provider: ScopeProvider = provider self.scope: Scope = GlobalScope() - self.__deferred_accesses: List[Access] = [] + self.__deferred_accesses: List[Tuple[Access, Optional[cst.Attribute]]] = [] + self.__top_level_attribute: Optional[cst.Attribute] = None @contextmanager def _new_scope( @@ -613,24 +639,18 @@ class ScopeVisitor(cst.CSTVisitor): def _visit_import_alike(self, node: Union[cst.Import, cst.ImportFrom]) -> bool: names = node.names - if not isinstance(names, cst.ImportStar): - # make sure node.names is Sequence[ImportAlias] - for name in names: - asname = name.asname - if asname is not None: - name_value = cst.ensure_type(asname.name, cst.Name).value - else: - name_node = name.name - while isinstance(name_node, cst.Attribute): - # the value of Attribute in import alike can only be either Name or Attribute - name_node = name_node.value - if isinstance(name_node, cst.Name): - name_value = name_node.value - else: - raise Exception( - f"Unexpected ImportAlias name value: {name_node}" - ) + if isinstance(names, cst.ImportStar): + return False + # make sure node.names is Sequence[ImportAlias] + for name in names: + asname = name.asname + if asname is not None: + name_values = _gen_dotted_names(cst.ensure_type(asname.name, cst.Name)) + else: + name_values = _gen_dotted_names(name.name) + + for name_value, _ in name_values: self.scope.record_assignment(name_value, node) return False @@ -641,7 +661,11 @@ class ScopeVisitor(cst.CSTVisitor): return self._visit_import_alike(node) def visit_Attribute(self, node: cst.Attribute) -> Optional[bool]: + if self.__top_level_attribute is None: + self.__top_level_attribute = node node.value.visit(self) # explicitly not visiting attr + if self.__top_level_attribute is node: + self.__top_level_attribute = None return False def visit_Name(self, node: cst.Name) -> Optional[bool]: @@ -651,8 +675,7 @@ class ScopeVisitor(cst.CSTVisitor): self.scope.record_assignment(node.value, node) elif context in (ExpressionContext.LOAD, ExpressionContext.DEL): access = Access(node, self.scope) - self.__deferred_accesses.append(access) - self.scope.record_access(node.value, access) + self.__deferred_accesses.append((access, self.__top_level_attribute)) def visit_FunctionDef(self, node: cst.FunctionDef) -> Optional[bool]: self.scope.record_assignment(node.name.value, node) @@ -788,10 +811,19 @@ class ScopeVisitor(cst.CSTVisitor): # In worst case, all accesses (m) and assignments (n) refer to the same name, # the time complexity is O(m x n), this optimizes it as O(m + n). 
scope_name_accesses = defaultdict(set) - for access in self.__deferred_accesses: - name = access.node.value + for (access, enclosing_attribute) in self.__deferred_accesses: + if enclosing_attribute is not None: + name = None + for name, node in _gen_dotted_names(enclosing_attribute): + if name in access.scope: + access.node = node + break + assert name is not None + else: + name = ensure_type(access.node, cst.Name).value scope_name_accesses[(access.scope, name)].add(access) access.record_assignments(access.scope[name]) + access.scope.record_access(name, access) for (scope, name), accesses in scope_name_accesses.items(): for assignment in scope[name]: diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 5699be17..f9d16462 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -141,25 +141,60 @@ class ScopeProviderTest(UnitTest): """ ) scope_of_module = scopes[m] - for idx, in_scope in enumerate(["foo", "fizzbuzz", "a", "g"]): - self.assertEqual( - len(scope_of_module[in_scope]), 1, f"{in_scope} should be in scope." - ) + for idx, in_scopes in enumerate( + [["foo", "foo.bar"], ["fizzbuzz"], ["a", "a.b", "a.b.c"], ["g"],] + ): + for in_scope in in_scopes: + self.assertEqual( + len(scope_of_module[in_scope]), 1, f"{in_scope} should be in scope." + ) - assignment = cast(Assignment, list(scope_of_module[in_scope])[0]) - self.assertEqual( - assignment.name, - in_scope, - f"Assignment name {assignment.name} should equal to {in_scope}.", - ) - import_node = ensure_type(m.body[idx], cst.SimpleStatementLine).body[0] - self.assertEqual( - assignment.node, - import_node, - f"The node of Assignment {assignment.node} should equal to {import_node}", - ) + assignment = cast(Assignment, list(scope_of_module[in_scope])[0]) + self.assertEqual( + assignment.name, + in_scope, + f"Assignment name {assignment.name} should equal to {in_scope}.", + ) + import_node = ensure_type(m.body[idx], cst.SimpleStatementLine).body[0] + self.assertEqual( + assignment.node, + import_node, + f"The node of Assignment {assignment.node} should equal to {import_node}", + ) - def test_imoprt_from(self) -> None: + def test_dotted_import_access(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + import a.b.c, x.y + a.b.c(x.z) + """ + ) + scope_of_module = scopes[m] + first_statement = ensure_type(m.body[1], cst.SimpleStatementLine) + call = ensure_type( + ensure_type(first_statement.body[0], cst.Expr).value, cst.Call + ) + self.assertTrue("a.b.c" in scope_of_module) + self.assertTrue("a" in scope_of_module) + self.assertEqual(scope_of_module.accesses["a"], set()) + + a_b_c_assignment = cast(Assignment, list(scope_of_module["a.b.c"])[0]) + a_b_c_access = list(a_b_c_assignment.references)[0] + self.assertEqual(scope_of_module.accesses["a.b.c"], {a_b_c_access}) + self.assertEqual(a_b_c_access.node, call.func) + + x_assignment = cast(Assignment, list(scope_of_module["x"])[0]) + x_access = list(x_assignment.references)[0] + self.assertEqual(scope_of_module.accesses["x"], {x_access}) + self.assertEqual( + x_access.node, ensure_type(call.args[0].value, cst.Attribute).value + ) + + self.assertTrue("x.y" in scope_of_module) + self.assertEqual(list(scope_of_module["x.y"])[0].references, set()) + self.assertEqual(scope_of_module.accesses["x.y"], set()) + + def test_import_from(self) -> None: m, scopes = get_scope_metadata_provider( """ from foo.bar import a, b as b_renamed @@ -782,7 +817,7 @@ class 
ScopeProviderTest(UnitTest): }, ) - def test_assignemnts_and_accesses(self) -> None: + def test_assignments_and_accesses(self) -> None: m, scopes = get_scope_metadata_provider( """ a = 1 From e5abaa85981e8f44b197277908d09b7a578015da Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Thu, 23 Apr 2020 11:08:19 -0700 Subject: [PATCH 006/632] [codemod] show errors from formatter subprocess call --- libcst/codemod/_cli.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 39a3ff27..5c134019 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -58,7 +58,7 @@ def invoke_formatter(formatter_args: Sequence[str], code: AnyStr) -> AnyStr: formatter_args, env={}, input=code, - stderr=subprocess.DEVNULL, + stderr=subprocess.STDOUT, universal_newlines=not work_with_bytes, encoding=None if work_with_bytes else "utf-8", ), @@ -70,6 +70,9 @@ def print_execution_result(result: TransformResult) -> None: print(f"WARNING: {warning}", file=sys.stderr) if isinstance(result, TransformFailure): + error = result.error + if isinstance(error, subprocess.CalledProcessError): + print(error.output.decode("utf-8"), file=sys.stderr) print(result.traceback_str, file=sys.stderr) From 675e91db8dd66c1c185759a17ef54af153d6bd2b Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Thu, 23 Apr 2020 11:08:19 -0700 Subject: [PATCH 007/632] [codemod] show errors from formatter subprocess call --- .../codemod_formatter_error_input.py.txt | 15 ++++++++ libcst/codemod/tests/test_codemod_cli.py | 38 +++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 libcst/codemod/tests/codemod_formatter_error_input.py.txt create mode 100644 libcst/codemod/tests/test_codemod_cli.py diff --git a/libcst/codemod/tests/codemod_formatter_error_input.py.txt b/libcst/codemod/tests/codemod_formatter_error_input.py.txt new file mode 100644 index 00000000..a0ec9d8d --- /dev/null +++ b/libcst/codemod/tests/codemod_formatter_error_input.py.txt @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict + +import subprocess # noqa: F401 +from contextlib import AsyncExitStack + + +def fun() -> None: + # this is an explicit syntax error to cause formatter error + async with AsyncExitStack() as stack: + stack diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py new file mode 100644 index 00000000..becf29e8 --- /dev/null +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -0,0 +1,38 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# +# pyre-strict + + +import subprocess +import sys + +from libcst.testing.utils import UnitTest + + +class TestCodemodCLI(UnitTest): + def test_codemod_formatter_error_input(self) -> None: + rlt = subprocess.run( + [ + "python", + "-m", + "libcst.tool", + "codemod", + "remove_unused_imports.RemoveUnusedImportsCommand", + "libcst/codemod/tests/codemod_formatter_error_input.py.txt", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + version = sys.version_info + if version[0] == 3 and version[1] == 6: + self.assertIn( + "ParserSyntaxError: Syntax Error @ 14:11.", rlt.stderr.decode("utf-8"), + ) + else: + self.assertIn( + "error: cannot format -: Cannot parse: 13:10: async with AsyncExitStack() as stack:", + rlt.stderr.decode("utf-8"), + ) From c992d2d01d2391aab94cead72d97db0da304e88d Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 22 Apr 2020 16:07:27 +0100 Subject: [PATCH 008/632] Add dotted imports support in get_qualified_names --- libcst/metadata/scope_provider.py | 30 ++++++++++---- libcst/metadata/tests/test_scope_provider.py | 42 ++++++++++++++++++++ 2 files changed, 64 insertions(+), 8 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 296da70d..4eafb77b 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -234,15 +234,23 @@ class _NameUtil: if not isinstance(import_names, cst.ImportStar): for name in import_names: real_name = get_full_name_for_node(name.name) - as_name = real_name - if name and name.asname: - name_asname = name.asname - if name_asname: - as_name = cst.ensure_type(name_asname.name, cst.Name).value - if as_name and full_name.startswith(as_name): + if not real_name: + continue + # real_name can contain `.` for dotted imports + # for these we want to find the longest prefix that matches full_name + parts = real_name.split(".") + real_names = [ + ".".join(parts[: i + 1]) for i in reversed(range(len(parts))) + ] + for real_name in real_names: + as_name = real_name if module: real_name = f"{module}.{real_name}" - if real_name: + if name and name.asname: + name_asname = name.asname + if name_asname: + as_name = cst.ensure_type(name_asname.name, cst.Name).value + if full_name.startswith(as_name): remaining_name = full_name.split(as_name)[1].lstrip(".") results.add( QualifiedName( @@ -252,6 +260,7 @@ class _NameUtil: QualifiedNameSource.IMPORT, ) ) + break return results @staticmethod @@ -422,8 +431,13 @@ class Scope(abc.ABC): full_name = get_full_name_for_node(node) if full_name is None: return results + assignments = set() parts = full_name.split(".") - assignments = self[parts[0]] if parts[0] in self else set() + for i in reversed(range(len(parts))): + prefix = ".".join(parts[: i + 1]) + if prefix in self: + assignments = self[prefix] + break for assignment in assignments: if isinstance(assignment, Assignment): assignment_node = assignment.node diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index f9d16462..b125e8d9 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -787,6 +787,48 @@ class ScopeProviderTest(UnitTest): {QualifiedName("f4..f5..C", QualifiedNameSource.LOCAL)}, ) + def test_get_qualified_names_for_dotted_imports(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + import a.b.c + a(a.b.d) + """ + ) + call = ensure_type( + ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Expr + ).value, + 
cst.Call, + ) + module_scope = scopes[m] + self.assertEqual( + module_scope.get_qualified_names_for(call.func), + {QualifiedName("a", QualifiedNameSource.IMPORT)}, + ) + self.assertEqual( + module_scope.get_qualified_names_for(call.args[0].value), + {QualifiedName("a.b.d", QualifiedNameSource.IMPORT)}, + ) + + import_stmt = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.Import + ) + a_b_c = ensure_type(import_stmt.names[0].name, cst.Attribute) + a_b = ensure_type(a_b_c.value, cst.Attribute) + a = a_b.value + self.assertEqual( + module_scope.get_qualified_names_for(a_b_c), + {QualifiedName("a.b.c", QualifiedNameSource.IMPORT)}, + ) + self.assertEqual( + module_scope.get_qualified_names_for(a_b), + {QualifiedName("a.b", QualifiedNameSource.IMPORT)}, + ) + self.assertEqual( + module_scope.get_qualified_names_for(a), + {QualifiedName("a", QualifiedNameSource.IMPORT)}, + ) + def test_multiple_assignments(self) -> None: m, scopes = get_scope_metadata_provider( """ From 53d4cfaca4dd678b996988379475f6d7eb5cf953 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 28 Apr 2020 10:23:22 +0100 Subject: [PATCH 009/632] simplify code based on review comments --- libcst/metadata/scope_provider.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 4eafb77b..655ca063 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -239,17 +239,13 @@ class _NameUtil: # real_name can contain `.` for dotted imports # for these we want to find the longest prefix that matches full_name parts = real_name.split(".") - real_names = [ - ".".join(parts[: i + 1]) for i in reversed(range(len(parts))) - ] + real_names = [".".join(parts[:i]) for i in range(len(parts), 0, -1)] for real_name in real_names: as_name = real_name if module: real_name = f"{module}.{real_name}" if name and name.asname: - name_asname = name.asname - if name_asname: - as_name = cst.ensure_type(name_asname.name, cst.Name).value + as_name = name.evaluated_alias if full_name.startswith(as_name): remaining_name = full_name.split(as_name)[1].lstrip(".") results.add( @@ -433,8 +429,8 @@ class Scope(abc.ABC): return results assignments = set() parts = full_name.split(".") - for i in reversed(range(len(parts))): - prefix = ".".join(parts[: i + 1]) + for i in range(len(parts), 0, -1): + prefix = ".".join(parts[:i]) if prefix in self: assignments = self[prefix] break From 6f6223d8ab513ce87ce12b701793fe8a66584126 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 28 Apr 2020 16:48:56 +0100 Subject: [PATCH 010/632] fix type error --- libcst/metadata/scope_provider.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 655ca063..725fc38e 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -245,7 +245,9 @@ class _NameUtil: if module: real_name = f"{module}.{real_name}" if name and name.asname: - as_name = name.evaluated_alias + eval_alias = name.evaluated_alias + if eval_alias is not None: + as_name = eval_alias if full_name.startswith(as_name): remaining_name = full_name.split(as_name)[1].lstrip(".") results.add( From 93a389b8d9e707fb1f93a9e22efe4be11375361e Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 28 Apr 2020 09:09:45 +0100 Subject: [PATCH 011/632] Handle non-import attribute accesses gracefully --- libcst/metadata/scope_provider.py | 6 
++++-- libcst/metadata/tests/test_scope_provider.py | 7 +++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 725fc38e..0d332886 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -613,7 +613,8 @@ def _gen_dotted_names( else: value = node.value if not isinstance(value, (cst.Attribute, cst.Name)): - raise ValueError(f"Unexpected name value in import: {value}") + # this is not an import + return name_values = iter(_gen_dotted_names(value)) (next_name, next_node) = next(name_values) yield (f"{next_name}.{node.attr.value}", node) @@ -830,7 +831,8 @@ class ScopeVisitor(cst.CSTVisitor): if name in access.scope: access.node = node break - assert name is not None + if name is None: + continue else: name = ensure_type(access.node, cst.Name).value scope_name_accesses[(access.scope, name)].add(access) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index b125e8d9..1aaff4eb 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1106,6 +1106,13 @@ class ScopeProviderTest(UnitTest): list(scope.parent._accesses.items()), before_parent_accesses ) + def test_attribute_of_function_call(self) -> None: + get_scope_metadata_provider("foo().bar") + + def test_self(self) -> None: + with open(__file__) as f: + get_scope_metadata_provider(f.read()) + def test_get_qualified_names_for_is_read_only(self) -> None: m, scopes = get_scope_metadata_provider( """ From bfcc4560870b73497e7fdf880037a5b6d43ff24d Mon Sep 17 00:00:00 2001 From: Pradeep Kumar Srinivasan Date: Mon, 27 Apr 2020 19:33:32 -0700 Subject: [PATCH 012/632] Add argument for ignoring existing annotations. This will allow us to override existing types based on the stub. --- .../visitors/_apply_type_annotations.py | 55 ++++++++++++++----- .../tests/test_apply_type_annotations.py | 40 +++++++++++++- 2 files changed, 81 insertions(+), 14 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index d047f72b..39754af7 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -203,7 +203,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): This is one of the transforms that is available automatically to you when running a codemod. To use it in this manner, import :class:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor` and then call the static - :meth:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor.add_stub_to_context` method, + :meth:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor.store_stub_in_context` method, giving it the current context (found as ``self.context`` for all subclasses of :class:`~libcst.codemod.Codemod`), the stub module from which you wish to add annotations. @@ -211,7 +211,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): stub_module = parse_module("x: int = ...") - ApplyTypeAnnotationsVisitor.add_stub_to_context(self.context, stub_module) + ApplyTypeAnnotationsVisitor.store_stub_in_context(self.context, stub_module) You can apply the type annotation using:: @@ -223,12 +223,19 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): x: int = 1 If the function or attribute already has a type annotation, it will not be overwritten. 
+ + To overwrite existing annotations when applying annotations from a stub, + use the keyword argument ``overwrite_existing_annotations=True`` when + constructing the codemod or when calling ``store_stub_in_context``. """ CONTEXT_KEY = "ApplyTypeAnnotationsVisitor" def __init__( - self, context: CodemodContext, annotations: Optional[Annotations] = None + self, + context: CodemodContext, + annotations: Optional[Annotations] = None, + overwrite_existing_annotations: bool = False, ) -> None: super().__init__(context) # Qualifier for storing the canonical name of the current function. @@ -236,20 +243,32 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.annotations: Annotations = annotations or Annotations() self.toplevel_annotations: Dict[str, cst.Annotation] = {} self.visited_classes: Set[str] = set() + self.overwrite_existing_annotations = overwrite_existing_annotations # We use this to determine the end of the import block so that we can # insert top-level annotations. self.import_statements: List[cst.ImportFrom] = [] @staticmethod - def add_stub_to_context(context: CodemodContext, stub: cst.Module) -> None: + def store_stub_in_context( + context: CodemodContext, + stub: cst.Module, + overwrite_existing_annotations: bool = False, + ) -> None: """ - Add a stub module to the :class:`~libcst.codemod.CodemodContext` so + Store a stub module in the :class:`~libcst.codemod.CodemodContext` so that type annotations from the stub can be applied in a later invocation of this class. + + If the ``overwrite_existing_annotations`` flag is ``True``, the + codemod will overwrite any existing annotations. + + If you call this function multiple times, only the last values of + ``stub`` and ``overwrite_existing_annotations`` will take effect. """ - context.scratch.setdefault(ApplyTypeAnnotationsVisitor.CONTEXT_KEY, []).append( - stub + context.scratch[ApplyTypeAnnotationsVisitor.CONTEXT_KEY] = ( + stub, + overwrite_existing_annotations, ) def transform_module_impl(self, tree: cst.Module) -> cst.Module: @@ -262,8 +281,14 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): tree.visit(import_gatherer) existing_import_names = _get_import_names(import_gatherer.all_imports) - stubs = self.context.scratch.get(ApplyTypeAnnotationsVisitor.CONTEXT_KEY, []) - for stub in stubs: + context_contents = self.context.scratch.get( + ApplyTypeAnnotationsVisitor.CONTEXT_KEY + ) + if context_contents: + stub, overwrite_existing_annotations = context_contents + self.overwrite_existing_annotations = ( + self.overwrite_existing_annotations or overwrite_existing_annotations + ) visitor = TypeCollector(existing_import_names, self.context) stub.visit(visitor) self.annotations.function_annotations.update(visitor.function_annotations) @@ -339,7 +364,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self, annotations: FunctionAnnotation, updated_node: cst.FunctionDef ) -> cst.Parameters: # Update params and default params with annotations - # don't override existing annotations or default values + # Don't override existing annotations or default values unless asked + # to overwrite existing annotations. 
def update_annotation( parameters: Sequence[cst.Param], annotations: Sequence[cst.Param] ) -> List[cst.Param]: @@ -350,7 +376,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): parameter_annotations[parameter.name.value] = parameter.annotation for parameter in parameters: key = parameter.name.value - if key in parameter_annotations and not parameter.annotation: + if key in parameter_annotations and ( + self.overwrite_existing_annotations or not parameter.annotation + ): parameter = parameter.with_changes( annotation=parameter_annotations[key] ) @@ -409,8 +437,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.qualifier.pop() if key in self.annotations.function_annotations: function_annotation = self.annotations.function_annotations[key] - # Only add new annotation if one doesn't already exist - if not updated_node.returns: + # Only add new annotation if explicitly told to overwrite existing + # annotations or if one doesn't already exist. + if self.overwrite_existing_annotations or not updated_node.returns: updated_node = updated_node.with_changes( returns=function_annotation.returns ) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 7662b22d..e32348a7 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -608,7 +608,45 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) def test_annotate_functions(self, stub: str, before: str, after: str) -> None: context = CodemodContext() - ApplyTypeAnnotationsVisitor.add_stub_to_context( + ApplyTypeAnnotationsVisitor.store_stub_in_context( context, parse_module(textwrap.dedent(stub.rstrip())) ) self.assertCodemod(before, after, context_override=context) + + @data_provider( + ( + ( + """ + def fully_annotated_with_different_stub(a: bool, b: bool) -> str: ... + """, + """ + def fully_annotated_with_different_stub(a: int, b: str) -> bool: + return 'hello' + """, + """ + def fully_annotated_with_different_stub(a: bool, b: bool) -> str: + return 'hello' + """, + ), + ) + ) + def test_annotate_functions_with_existing_annotations( + self, stub: str, before: str, after: str + ) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + # Test setting the overwrite flag on the codemod instance. + self.assertCodemod( + before, after, context_override=context, overwrite_existing_annotations=True + ) + + # Test setting the flag when storing the stub in the context. + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, + parse_module(textwrap.dedent(stub.rstrip())), + overwrite_existing_annotations=True, + ) + self.assertCodemod(before, after, context_override=context) From 3c5aa268393952829bb7623b4bf4e865f20b1d2a Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Tue, 12 May 2020 09:40:47 -0700 Subject: [PATCH 013/632] [release] Bump LibCST to new release 0.3.4 --- CHANGELOG.md | 14 ++++++++++++++ libcst/_version.py | 2 +- libcst/codemod/visitors/_apply_type_annotations.py | 11 +++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f220e3b..fb199841 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ +# 0.3.5 - 2020-05-12 + +## Updated + - Expose more granular `Assignments` and `Accesses` for dotted imports in `ScopeProvider`. 
[#284](https://github.com/Instagram/LibCST/pull/284) + - `get_qualified_names_for` returns the most appropriate qualified name. [#290](https://github.com/Instagram/LibCST/pull/290) + - Surface `SyntaxError` raised by formatter in codemod run. [#288](https://github.com/Instagram/LibCST/pull/288) [#289](https://github.com/Instagram/LibCST/pull/289) + - Rename `ApplyTypeAnnotationsVisitor.add_stub_to_context` as `ApplyTypeAnnotationsVisitor.store_stub_in_context` and add `overwrite_existing_annotations` to allow overwrite existing type annotations. [#289](https://github.com/Instagram/LibCST/pull/291) + +## Fixed + - Close opened file handles on finishing codemod to avoid `Too many open files` on OSX. [#283](https://github.com/Instagram/LibCST/pull/283) + +## Deprecated + - `ApplyTypeAnnotationsVisitor.add_stub_to_context` is renamed as `ApplyTypeAnnotationsVisitor.store_stub_in_context`. + # 0.3.4 - 2020-03-27 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index 48ad11f7..5cffd469 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.4" +LIBCST_VERSION: str = "0.3.5" diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 39754af7..2c9a0f82 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -249,6 +249,17 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # insert top-level annotations. self.import_statements: List[cst.ImportFrom] = [] + @staticmethod + def add_stub_to_context( + context: CodemodContext, + stub: cst.Module, + overwrite_existing_annotations: bool = False, + ) -> None: + # deprecated, should be removed in 0.4 release. + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, stub, overwrite_existing_annotations + ) + @staticmethod def store_stub_in_context( From 746226528bb454580f434b2be5194c851da98248 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Wed, 27 May 2020 09:16:17 -0400 Subject: [PATCH 014/632] Add NamedTuple to dataclass conversion codemod (#299) Add NamedTuple to dataclass conversion codemod. --- .../convert_namedtuple_to_dataclass.py | 74 ++++++++ .../test_convert_namedtuple_to_dataclass.py | 179 ++++++++++++++++++ 2 files changed, 253 insertions(+) create mode 100644 libcst/codemod/commands/convert_namedtuple_to_dataclass.py create mode 100644 libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py diff --git a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py new file mode 100644 index 00000000..80c6eb52 --- /dev/null +++ b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py @@ -0,0 +1,74 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict +from typing import List, Optional, Sequence + +import libcst as cst +from libcst.codemod import VisitorBasedCodemodCommand +from libcst.codemod.visitors import AddImportsVisitor, RemoveImportsVisitor +from libcst.metadata import ( + ProviderT, + QualifiedName, + QualifiedNameProvider, + QualifiedNameSource, +) + + +class ConvertNamedTupleToDataclassCommand(VisitorBasedCodemodCommand): + """ + Convert NamedTuple class declarations to Python 3.7 dataclasses.
+ + This only performs a conversion at the class declaration level. + It does not perform type annotation conversions, nor does it convert + NamedTuple-specific attributes and methods. + """ + + DESCRIPTION: str = "Convert NamedTuple class declarations to Python 3.7 dataclasses using the @dataclass decorator." + METADATA_DEPENDENCIES: Sequence[ProviderT] = (QualifiedNameProvider,) + + # The 'NamedTuple' we are interested in + qualified_namedtuple: QualifiedName = QualifiedName( + name="typing.NamedTuple", source=QualifiedNameSource.IMPORT + ) + + def leave_ClassDef( + self, original_node: cst.ClassDef, updated_node: cst.ClassDef + ) -> cst.ClassDef: + new_bases: List[cst.Arg] = [] + namedtuple_base: Optional[cst.Arg] = None + + # Need to examine the original node's bases since they are directly tied to import metadata + for base_class in original_node.bases: + # Compare the base class's qualified name against the expected typing.NamedTuple + if not QualifiedNameProvider.has_name( + self, base_class.value, self.qualified_namedtuple + ): + # Keep all bases that are not of type typing.NamedTuple + new_bases.append(base_class) + else: + namedtuple_base = base_class + + # We still want to return the updated node in case some of its children have been modified + if namedtuple_base is None: + return updated_node + + AddImportsVisitor.add_needed_import(self.context, "dataclasses", "dataclass") + RemoveImportsVisitor.remove_unused_import_by_node( + self.context, namedtuple_base.value + ) + + call = cst.ensure_type( + cst.parse_expression( + "dataclass(frozen=True)", config=self.module.config_for_parsing + ), + cst.Call, + ) + return updated_node.with_changes( + lpar=cst.MaybeSentinel.DEFAULT, + rpar=cst.MaybeSentinel.DEFAULT, + bases=new_bases, + decorators=[*original_node.decorators, cst.Decorator(decorator=call)], + ) diff --git a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py new file mode 100644 index 00000000..653d2fd4 --- /dev/null +++ b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py @@ -0,0 +1,179 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict +from libcst.codemod import CodemodTest +from libcst.codemod.commands.convert_namedtuple_to_dataclass import ( + ConvertNamedTupleToDataclassCommand, +) + + +class ConvertNamedTupleToDataclassCommandTest(CodemodTest): + + TRANSFORM = ConvertNamedTupleToDataclassCommand + + def test_no_change(self) -> None: + """ + Should result in no change as there are no children of NamedTuple. + """ + + before = """ + @dataclass(frozen=True) + class Foo: + pass + """ + after = """ + @dataclass(frozen=True) + class Foo: + pass + """ + self.assertCodemod(before, after) + + def test_change(self) -> None: + """ + Should remove the NamedTuple import along with its use as a base class for Foo. + Should import dataclasses.dataclass and annotate Foo. + """ + + before = """ + from typing import NamedTuple + + class Foo(NamedTuple): + pass + """ + after = """ + from dataclasses import dataclass + + @dataclass(frozen=True) + class Foo: + pass + """ + self.assertCodemod(before, after) + + def test_with_decorator_already(self) -> None: + """ + Should retain existing decorator. 
+ """ + + before = """ + from typing import NamedTuple + + @other_decorator + class Foo(NamedTuple): + pass + """ + after = """ + from dataclasses import dataclass + + @other_decorator + @dataclass(frozen=True) + class Foo: + pass + """ + self.assertCodemod(before, after) + + def test_multiple_bases(self) -> None: + """ + Should retain all existing bases other than NamedTuple. + """ + + before = """ + from typing import NamedTuple + + class Foo(NamedTuple, OtherBase, YetAnotherBase): + pass + """ + after = """ + from dataclasses import dataclass + + @dataclass(frozen=True) + class Foo(OtherBase, YetAnotherBase): + pass + """ + self.assertCodemod(before, after) + + def test_nested_classes(self) -> None: + """ + Should perform expected changes on inner classes. + """ + + before = """ + from typing import NamedTuple + + class OuterClass: + class InnerClass(NamedTuple): + pass + """ + after = """ + from dataclasses import dataclass + + class OuterClass: + @dataclass(frozen=True) + class InnerClass: + pass + """ + self.assertCodemod(before, after) + + def test_aliased_object_import(self) -> None: + """ + Should detect aliased NamedTuple object import and base. + """ + + before = """ + from typing import NamedTuple as nt + + class Foo(nt): + pass + """ + after = """ + from dataclasses import dataclass + + @dataclass(frozen=True) + class Foo: + pass + """ + self.assertCodemod(before, after) + + def test_aliased_module_import(self) -> None: + """ + Should detect aliased `typing` module import and base. + """ + + before = """ + import typing as typ + + class Foo(typ.NamedTuple): + pass + """ + after = """ + from dataclasses import dataclass + + @dataclass(frozen=True) + class Foo: + pass + """ + self.assertCodemod(before, after) + + def test_other_unused_imports_not_removed(self) -> None: + """ + Should not remove any imports other than NamedTuple, even if they are also unused. + """ + + before = """ + from typing import NamedTuple + import SomeUnusedImport + + class Foo(NamedTuple): + pass + """ + after = """ + import SomeUnusedImport + from dataclasses import dataclass + + @dataclass(frozen=True) + class Foo: + pass + """ + self.assertCodemod(before, after) From f32389a126e60c93335815735516b83251e6b3b8 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 28 May 2020 00:39:30 +0100 Subject: [PATCH 015/632] Fix StopIteration exception during scope analysis (#298) During scope analysis all attribute accesses are collected for matching on import names. The matching code (specifically `_gen_dotted_names`) was not prepared for all types of expressions. In particular, complex expressions like `foo[0].bar.baz()` caused a `StopIteration` exception when `_gen_dotted_names` calls itself recursively. The nested call doesn't yield any values, and so calling `next()` on it raises. This commit fixes these types of errors. 
--- libcst/metadata/scope_provider.py | 14 ++++++++------ libcst/metadata/tests/test_scope_provider.py | 5 +++++ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 0d332886..56c1b252 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -616,7 +616,10 @@ def _gen_dotted_names( # this is not an import return name_values = iter(_gen_dotted_names(value)) - (next_name, next_node) = next(name_values) + next_pair = next(name_values, None) + if next_pair is None: + return + (next_name, next_node) = next_pair yield (f"{next_name}.{node.attr.value}", node) yield (next_name, next_node) yield from name_values @@ -825,16 +828,15 @@ class ScopeVisitor(cst.CSTVisitor): # the time complexity is O(m x n), this optimizes it as O(m + n). scope_name_accesses = defaultdict(set) for (access, enclosing_attribute) in self.__deferred_accesses: + name = ensure_type(access.node, cst.Name).value if enclosing_attribute is not None: - name = None + # if _gen_dotted_names doesn't generate any values, fall back to + # the original name node above for name, node in _gen_dotted_names(enclosing_attribute): if name in access.scope: access.node = node break - if name is None: - continue - else: - name = ensure_type(access.node, cst.Name).value + scope_name_accesses[(access.scope, name)].add(access) access.record_assignments(access.scope[name]) access.scope.record_access(name, access) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 1aaff4eb..34be54ad 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1109,6 +1109,11 @@ class ScopeProviderTest(UnitTest): def test_attribute_of_function_call(self) -> None: get_scope_metadata_provider("foo().bar") + def test_attribute_of_subscript_called(self) -> None: + m, scopes = get_scope_metadata_provider("foo[0].bar.baz()") + scope = scopes[m] + self.assertIn("foo", scope.accesses) + def test_self(self) -> None: with open(__file__) as f: get_scope_metadata_provider(f.read()) From 1a66b41fe9664f7bbb63cf72a5c7ecf6ae62265b Mon Sep 17 00:00:00 2001 From: lrjball <50599110+lrjball@users.noreply.github.com> Date: Thu, 28 May 2020 01:15:27 +0100 Subject: [PATCH 016/632] Fixed typo in the file name (precent > percent) (#301) * Fixed typo in the file name (precent > percent) Noticed the typo when using `python -m libcst.tool list`, hopefully this fix won't break backwards compatibility! 
* fixed typo in test as well --- ...ormat_to_fstring.py => convert_percent_format_to_fstring.py} | 0 .../commands/tests/test_convert_percent_format_to_fstring.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename libcst/codemod/commands/{convert_precent_format_to_fstring.py => convert_percent_format_to_fstring.py} (100%) diff --git a/libcst/codemod/commands/convert_precent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py similarity index 100% rename from libcst/codemod/commands/convert_precent_format_to_fstring.py rename to libcst/codemod/commands/convert_percent_format_to_fstring.py diff --git a/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py index a9e5ff09..0dd79d19 100644 --- a/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py @@ -5,7 +5,7 @@ # # pyre-strict from libcst.codemod import CodemodTest -from libcst.codemod.commands.convert_precent_format_to_fstring import ( +from libcst.codemod.commands.convert_percent_format_to_fstring import ( ConvertPercentFormatStringCommand, ) From b9d4629f9142983bf4c342e082fec394005025a4 Mon Sep 17 00:00:00 2001 From: jimmylai Date: Thu, 28 May 2020 09:07:30 -0700 Subject: [PATCH 017/632] [scope] remove iter call to be efficient (#302) Co-authored-by: Jimmy Lai --- libcst/metadata/scope_provider.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 56c1b252..db25d5e2 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -14,7 +14,6 @@ from enum import Enum, auto from typing import ( Collection, Dict, - Iterable, Iterator, List, Mapping, @@ -607,22 +606,23 @@ class ComprehensionScope(LocalScope): # each string has the corresponding CSTNode attached to it def _gen_dotted_names( node: Union[cst.Attribute, cst.Name] -) -> Iterable[Tuple[str, Union[cst.Attribute, cst.Name]]]: +) -> Iterator[Tuple[str, Union[cst.Attribute, cst.Name]]]: if isinstance(node, cst.Name): - yield (node.value, node) + yield node.value, node else: value = node.value if not isinstance(value, (cst.Attribute, cst.Name)): # this is not an import return - name_values = iter(_gen_dotted_names(value)) - next_pair = next(name_values, None) - if next_pair is None: + name_values = _gen_dotted_names(value) + try: + next_name, next_node = next(name_values) + except StopIteration: return - (next_name, next_node) = next_pair - yield (f"{next_name}.{node.attr.value}", node) - yield (next_name, next_node) - yield from name_values + else: + yield f"{next_name}.{node.attr.value}", node + yield next_name, next_node + yield from name_values class ScopeVisitor(cst.CSTVisitor): From 666247a192e767fa638b943d39643c0f9d6d2f09 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Thu, 28 May 2020 16:53:26 -0400 Subject: [PATCH 018/632] [release] Bump LibCST to new release 0.3.6 (#300) * Bump LibCST to new release 0.3.6 * Increase version number * Some more fixes to include in CHANGELOG * Update CHANGELOG.md Co-authored-by: jimmylai Co-authored-by: jimmylai --- CHANGELOG.md | 11 ++++++++++- libcst/_version.py | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fb199841..c4442fb9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,12 @@ +# 0.3.6 - 2020-05-27 + +## Added + 
- Added `ConvertNamedTupleToDataclassCommand` to convert `NamedTuple` class declarations to Python 3.7 `dataclasses` using the `@dataclass(frozen=True)` decorator. [#299](https://github.com/Instagram/LibCST/pull/299) + +## Fixed + - Fixed typo in file name `libcst/codemod/commands/convert_percent_format_to_fstring.py`. [#301](https://github.com/Instagram/LibCST/pull/301) + - Fixed `StopIteration` exception during scope analysis matching on import names. [#302](https://github.com/Instagram/LibCST/pull/302) + # 0.3.5 - 2020-05-12 ## Updated @@ -32,7 +41,7 @@ # 0.3.3 - 2020-03-05 ## Added - - `ByteSpanPositionProvider` provides start offset and length of CSTNode as metadata. + - `ByteSpanPositionProvider` provides start offset and length of CSTNode as metadata. - `get_docstring` helper provides docstring from `Module`, `ClassDef` and `FunctionDef` node types. ## Updated diff --git a/libcst/_version.py b/libcst/_version.py index 5cffd469..7e23db77 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.5" +LIBCST_VERSION: str = "0.3.6" From ed1a2ad4d1611501d7f234692e9b5496eafa7026 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Thu, 4 Jun 2020 14:27:54 -0400 Subject: [PATCH 019/632] Add fb open source required links (#307) Links to Privacy Policy and Terms of Use in README.rst and docs/index --- README.rst | 20 +++++++++++++++----- docs/source/index.rst | 6 ++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index a489a719..0fa3edf4 100644 --- a/README.rst +++ b/README.rst @@ -47,9 +47,9 @@ fields, we've created a lossless CST that looks and feels like an AST. .. why-libcst-intro-end -You can learn more about `the value that LibCST provides -`__ and `our -motivations for the project +You can learn more about `the value that LibCST provides +`__ and `our +motivations for the project `__ in `our documentation `__. Try it out with `notebook examples `__. @@ -115,14 +115,14 @@ BinaryOperation( ], ) -For a more detailed usage example, `see our documentation +For a more detailed usage example, `see our documentation `__. Installation ------------ LibCST requires Python 3.6+ and can be easily installed using most common Python -packaging tools. We recommend installing the latest stable release from +packaging tools. We recommend installing the latest stable release from `PyPI `_ with pip: .. code-block:: shell @@ -209,6 +209,16 @@ License LibCST is MIT licensed, as found in the LICENSE file. +.. fb-docs-start + +Privacy Policy and Terms of Use +=============================== + +- `Privacy Policy `_ +- `Terms of Use `_ + +.. fb-docs-end + Acknowledgements ================ diff --git a/docs/source/index.rst b/docs/source/index.rst index 9844dd0a..7c604786 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -45,9 +45,15 @@ LibCST experimental + Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` + + +.. 
include:: ../../README.rst + :start-after: fb-docs-start + :end-before: fb-docs-end From dcdb04660f2b954b78db97b750cda26e96c825c9 Mon Sep 17 00:00:00 2001 From: jimmylai Date: Wed, 10 Jun 2020 09:07:57 -0700 Subject: [PATCH 020/632] Upgrade Codecov to 2.1.4 (#311) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 3584e616..6edf95da 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ black==19.10b0 -codecov==2.0.15 +codecov==2.1.4 coverage==4.5.4 flake8==3.7.8 hypothesis==4.36.0 From 228589faa05b5612acea9d3ae1b99df6ea37a55f Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Wed, 10 Jun 2020 12:46:35 -0400 Subject: [PATCH 021/632] Fix stdout being plugged into codemod-ed file (#309) --- libcst/codemod/_cli.py | 1 - 1 file changed, 1 deletion(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 5c134019..67355a19 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -58,7 +58,6 @@ def invoke_formatter(formatter_args: Sequence[str], code: AnyStr) -> AnyStr: formatter_args, env={}, input=code, - stderr=subprocess.STDOUT, universal_newlines=not work_with_bytes, encoding=None if work_with_bytes else "utf-8", ), From dbcb5bed996e8fa075facc16e52938cca99b2325 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 10 Jun 2020 10:29:58 -0700 Subject: [PATCH 022/632] Improve default-version selection logic (#306) * Add Python 3.9 to tox envlist * Require newer typing_extensions for 3.9 For simplicity, use the new version in all cases. * Improve default-version selection to work on 3.9 While we're at it, improve the code to work with a likely 3.10 by allowing multiple digits for minor version. --- libcst/_parser/parso/utils.py | 2 +- libcst/_parser/tests/test_config.py | 36 ++++++++++++++++++++++ libcst/_parser/types/config.py | 47 ++++++++++++++++++++++------- requirements.txt | 2 +- setup.py | 2 +- tox.ini | 2 +- 6 files changed, 76 insertions(+), 15 deletions(-) create mode 100644 libcst/_parser/tests/test_config.py diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py index 827869a0..6cf233c4 100644 --- a/libcst/_parser/parso/utils.py +++ b/libcst/_parser/parso/utils.py @@ -182,7 +182,7 @@ class PythonVersionInfo: def _parse_version(version: str) -> PythonVersionInfo: - match = re.match(r"(\d+)(?:\.(\d)(?:\.\d+)?)?$", version) + match = re.match(r"(\d+)(?:\.(\d+)(?:\.\d+)?)?$", version) if match is None: raise ValueError( "The given version is not in the right format. " diff --git a/libcst/_parser/tests/test_config.py b/libcst/_parser/tests/test_config.py new file mode 100644 index 00000000..78692369 --- /dev/null +++ b/libcst/_parser/tests/test_config.py @@ -0,0 +1,36 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree.
+ +# pyre-strict +from libcst._parser.parso.utils import PythonVersionInfo +from libcst._parser.types.config import _pick_compatible_python_version +from libcst.testing.utils import UnitTest + + +class ConfigTest(UnitTest): + def test_pick_compatible(self) -> None: + self.assertEqual( + PythonVersionInfo(3, 1), _pick_compatible_python_version("3.2") + ) + self.assertEqual( + PythonVersionInfo(3, 1), _pick_compatible_python_version("3.1") + ) + self.assertEqual( + PythonVersionInfo(3, 8), _pick_compatible_python_version("3.9") + ) + self.assertEqual( + PythonVersionInfo(3, 8), _pick_compatible_python_version("3.10") + ) + self.assertEqual( + PythonVersionInfo(3, 8), _pick_compatible_python_version("4.0") + ) + with self.assertRaisesRegex( + ValueError, + ( + r"No version found older than 1\.0 \(PythonVersionInfo\(" + + r"major=1, minor=0\)\) while running on" + ), + ): + _pick_compatible_python_version("1.0") diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index facb9ee2..5a417f91 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -8,9 +8,10 @@ import abc import codecs import re +import sys from dataclasses import dataclass, field, fields from enum import Enum -from typing import FrozenSet, List, Pattern, Sequence, Union +from typing import FrozenSet, List, Optional, Pattern, Sequence, Union from libcst._add_slots import add_slots from libcst._nodes.whitespace import NEWLINE_RE @@ -59,6 +60,7 @@ class AutoConfig(Enum): return str(self) +# This list should be kept in sorted order. KNOWN_PYTHON_VERSION_STRINGS = ["3.0", "3.1", "3.3", "3.5", "3.6", "3.7", "3.8"] @@ -87,7 +89,11 @@ class PartialParserConfig: #: run LibCST. For example, you can parse code as 3.7 with a CPython 3.6 #: interpreter. #: + #: If unspecified, it will default to the syntax of the running interpreter + #: (rounding down from among the following list). + #: #: Currently, only Python 3.0, 3.1, 3.3, 3.5, 3.6, 3.7 and 3.8 syntax is supported. + #: The gaps did not have any syntax changes from the version prior. python_version: Union[str, AutoConfig] = AutoConfig.token #: A named tuple with the ``major`` and ``minor`` Python version numbers. This is @@ -113,17 +119,20 @@ class PartialParserConfig: def __post_init__(self) -> None: raw_python_version = self.python_version - # `parse_version_string` will raise a ValueError if the version is invalid. - # - # We use object.__setattr__ because the dataclass is frozen. See: - # https://docs.python.org/3/library/dataclasses.html#frozen-instances - # This should be safe behavior inside of `__post_init__`. - parsed_python_version = parse_version_string( - None if isinstance(raw_python_version, AutoConfig) else raw_python_version - ) - # Once we add support for more versions of Python, we can change this to detect - # the supported version range. + if isinstance(raw_python_version, AutoConfig): + # If unspecified, we'll try to pick the same as the running + # interpreter. There will always be at least one entry. + parsed_python_version = _pick_compatible_python_version() + else: + # If the caller specified a version, we require that to be a known + # version (because we don't want to encourage doing duplicate work + # when there weren't syntax changes). + + # `parse_version_string` will raise a ValueError if the version is + # invalid. 
+ parsed_python_version = parse_version_string(raw_python_version) + if not any( parsed_python_version == parse_version_string(v) for v in KNOWN_PYTHON_VERSION_STRINGS @@ -135,6 +144,9 @@ class PartialParserConfig: + "supported by future releases." ) + # We use object.__setattr__ because the dataclass is frozen. See: + # https://docs.python.org/3/library/dataclasses.html#frozen-instances + # This should be safe behavior inside of `__post_init__`. object.__setattr__(self, "parsed_python_version", parsed_python_version) encoding = self.encoding @@ -170,3 +182,16 @@ class PartialParserConfig: init_keys.append(f"{f.name}={value!r}") return f"{self.__class__.__name__}({', '.join(init_keys)})" + + +def _pick_compatible_python_version(version: Optional[str] = None) -> PythonVersionInfo: + max_version = parse_version_string(version) + for v in KNOWN_PYTHON_VERSION_STRINGS[::-1]: + tmp = parse_version_string(v) + if tmp <= max_version: + return tmp + + raise ValueError( + f"No version found older than {version} ({max_version}) while " + + f"running on {sys.version_info}" + ) diff --git a/requirements.txt b/requirements.txt index 92d4d1ff..752481bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ dataclasses==0.6.0; python_version < '3.7' -typing_extensions==3.7.2 +typing_extensions==3.7.4.2 typing_inspect==0.4.0 pyyaml==5.2 diff --git a/setup.py b/setup.py index 8e0fa881..c21dc8b2 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setuptools.setup( python_requires=">=3.6", install_requires=[ "dataclasses; python_version < '3.7'", - "typing_extensions >= 3.7.2", + "typing_extensions >= 3.7.4.2", "typing_inspect >= 0.4.0", "pyyaml >= 5.2", ], diff --git a/tox.ini b/tox.ini index 85406553..43da6fb0 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py36, py37, py38, lint, docs +envlist = py36, py37, py38, py39, lint, docs [testenv] deps = From cc7bb9db14b65b758aa67cd37e786784babc3519 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Thu, 11 Jun 2020 12:04:10 -0400 Subject: [PATCH 023/632] Add maxsplit value in QualifiedName retrieval (#312) Maxsplit value to account for repeated letters in a QualifiedName --- libcst/metadata/scope_provider.py | 2 +- .../tests/test_qualified_name_provider.py | 24 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index db25d5e2..302cdd44 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -248,7 +248,7 @@ class _NameUtil: if eval_alias is not None: as_name = eval_alias if full_name.startswith(as_name): - remaining_name = full_name.split(as_name)[1].lstrip(".") + remaining_name = full_name.split(as_name, 1)[1].lstrip(".") results.add( QualifiedName( f"{real_name}.{remaining_name}" diff --git a/libcst/metadata/tests/test_qualified_name_provider.py b/libcst/metadata/tests/test_qualified_name_provider.py index 5bb158f2..27325e9d 100644 --- a/libcst/metadata/tests/test_qualified_name_provider.py +++ b/libcst/metadata/tests/test_qualified_name_provider.py @@ -264,3 +264,27 @@ class ScopeProviderTest(UnitTest): ) eval = attr.attr self.assertEqual(names[eval], set()) + + def test_repeated_values_in_qualified_name(self) -> None: + m, names = get_qualified_name_metadata_provider( + """ + import a + class Foo: + bar: a.aa.aaa + """ + ) + foo = ensure_type(m.body[1], cst.ClassDef) + bar = ensure_type( + ensure_type( + ensure_type(foo.body, cst.IndentedBlock).body[0], + cst.SimpleStatementLine, + 
).body[0], + cst.AnnAssign, + ) + + annotation = ensure_type(bar.annotation, cst.Annotation) + attribute = ensure_type(annotation.annotation, cst.Attribute) + + self.assertEqual( + names[attribute], {QualifiedName("a.aa.aaa", QualifiedNameSource.IMPORT)} + ) From c023fa7c4caff3fd2b3946080f9a58b539b10363 Mon Sep 17 00:00:00 2001 From: jimmylai Date: Fri, 12 Jun 2020 18:24:18 -0700 Subject: [PATCH 024/632] [typing] enable Pyre strict mode by default (#313) Co-authored-by: Jimmy Lai --- .circleci/.pyre_configuration | 3 +- .pyre_configuration.example | 1 + docs/source/conf.py | 1 + libcst/__init__.py | 1 - libcst/_add_slots.py | 1 - libcst/_batched_visitor.py | 1 - libcst/_exceptions.py | 1 - libcst/_maybe_sentinel.py | 1 - libcst/_metadata_dependent.py | 1 - libcst/_nodes/__init__.py | 1 - libcst/_nodes/base.py | 1 + libcst/_nodes/deep_equals.py | 1 - libcst/_nodes/expression.py | 4 +- libcst/_nodes/internal.py | 1 - libcst/_nodes/module.py | 1 + libcst/_nodes/op.py | 1 - libcst/_nodes/tests/base.py | 1 - libcst/_nodes/tests/test_assert.py | 1 - libcst/_nodes/tests/test_assign.py | 1 - libcst/_nodes/tests/test_atom.py | 1 - libcst/_nodes/tests/test_attribute.py | 1 - libcst/_nodes/tests/test_await.py | 1 - libcst/_nodes/tests/test_binary_op.py | 1 - libcst/_nodes/tests/test_boolean_op.py | 1 - libcst/_nodes/tests/test_call.py | 1 - libcst/_nodes/tests/test_classdef.py | 1 - libcst/_nodes/tests/test_comment.py | 1 - libcst/_nodes/tests/test_comparison.py | 1 - libcst/_nodes/tests/test_cst_node.py | 1 - libcst/_nodes/tests/test_del.py | 1 - libcst/_nodes/tests/test_dict.py | 1 - libcst/_nodes/tests/test_dict_comp.py | 1 - libcst/_nodes/tests/test_docstring.py | 1 - libcst/_nodes/tests/test_else.py | 1 - libcst/_nodes/tests/test_empty_line.py | 1 - libcst/_nodes/tests/test_for.py | 1 - libcst/_nodes/tests/test_funcdef.py | 1 - libcst/_nodes/tests/test_global.py | 1 - libcst/_nodes/tests/test_if.py | 1 - libcst/_nodes/tests/test_ifexp.py | 1 - libcst/_nodes/tests/test_import.py | 1 - libcst/_nodes/tests/test_indented_block.py | 1 - libcst/_nodes/tests/test_lambda.py | 1 - .../tests/test_leaf_small_statements.py | 1 - libcst/_nodes/tests/test_list.py | 1 - libcst/_nodes/tests/test_matrix_multiply.py | 1 - libcst/_nodes/tests/test_module.py | 1 - libcst/_nodes/tests/test_namedexpr.py | 1 - libcst/_nodes/tests/test_newline.py | 1 - libcst/_nodes/tests/test_nonlocal.py | 1 - libcst/_nodes/tests/test_number.py | 1 - libcst/_nodes/tests/test_raise.py | 1 - libcst/_nodes/tests/test_removal_behavior.py | 1 - libcst/_nodes/tests/test_return.py | 1 - libcst/_nodes/tests/test_set.py | 1 - libcst/_nodes/tests/test_simple_comp.py | 1 - libcst/_nodes/tests/test_simple_statement.py | 1 - libcst/_nodes/tests/test_simple_whitespace.py | 1 - libcst/_nodes/tests/test_small_statement.py | 1 - libcst/_nodes/tests/test_subscript.py | 1 - .../_nodes/tests/test_trailing_whitespace.py | 1 - libcst/_nodes/tests/test_try.py | 1 - libcst/_nodes/tests/test_tuple.py | 1 - libcst/_nodes/tests/test_unary_op.py | 1 - libcst/_nodes/tests/test_while.py | 1 - libcst/_nodes/tests/test_with.py | 1 - libcst/_nodes/tests/test_yield.py | 1 - libcst/_nodes/whitespace.py | 1 - libcst/_parser/__init__.py | 2 - libcst/_parser/base_parser.py | 1 - libcst/_parser/conversions/expression.py | 4 + libcst/_parser/conversions/module.py | 1 + libcst/_parser/conversions/params.py | 1 + libcst/_parser/conversions/statement.py | 1 + libcst/_parser/conversions/terminals.py | 1 + libcst/_parser/custom_itertools.py | 1 - libcst/_parser/detect_config.py 
| 2 +- libcst/_parser/entrypoints.py | 1 - libcst/_parser/grammar.py | 1 - libcst/_parser/parso/pgen2/generator.py | 1 + libcst/_parser/parso/pgen2/grammar_parser.py | 1 + libcst/_parser/parso/python/token.py | 2 +- libcst/_parser/parso/python/tokenize.py | 1 + libcst/_parser/parso/tests/test_fstring.py | 1 + libcst/_parser/parso/tests/test_tokenize.py | 1 + libcst/_parser/parso/tests/test_utils.py | 1 + libcst/_parser/parso/utils.py | 1 - libcst/_parser/production_decorator.py | 1 - libcst/_parser/python_parser.py | 1 + libcst/_parser/tests/test_detect_config.py | 1 - libcst/_parser/tests/test_footer_behavior.py | 1 - libcst/_parser/tests/test_parse_errors.py | 1 - .../_parser/tests/test_whitespace_parser.py | 1 - libcst/_parser/tests/test_wrapped_tokenize.py | 1 + libcst/_parser/types/config.py | 1 - libcst/_parser/types/conversions.py | 2 + libcst/_parser/types/partials.py | 1 - libcst/_parser/types/production.py | 1 - libcst/_parser/types/tests/test_config.py | 1 - libcst/_parser/types/token.py | 1 - libcst/_parser/types/whitespace_state.py | 1 - libcst/_parser/whitespace_parser.py | 1 - libcst/_parser/wrapped_tokenize.py | 1 - libcst/_position.py | 1 - libcst/_removal_sentinel.py | 1 - libcst/_tabs.py | 2 - libcst/_typed_visitor.py | 1 - libcst/_typed_visitor_base.py | 1 - libcst/_types.py | 2 - libcst/_visitors.py | 1 - libcst/codegen/gather.py | 1 - libcst/codegen/gen_matcher_classes.py | 2 - libcst/codegen/gen_type_mapping.py | 2 - libcst/codegen/gen_visitor_functions.py | 2 - libcst/codegen/generate.py | 1 - libcst/codegen/tests/test_codegen_clean.py | 1 - libcst/codegen/transforms.py | 1 - libcst/codemod/__init__.py | 1 - libcst/codemod/_cli.py | 1 - libcst/codemod/_codemod.py | 1 - libcst/codemod/_command.py | 1 - libcst/codemod/_context.py | 1 - libcst/codemod/_runner.py | 1 - libcst/codemod/_testing.py | 1 - libcst/codemod/_visitor.py | 1 - libcst/codemod/commands/__init__.py | 1 - libcst/codemod/commands/add_pyre_directive.py | 1 - .../commands/convert_format_to_fstring.py | 1 - .../convert_namedtuple_to_dataclass.py | 1 - .../convert_percent_format_to_fstring.py | 1 - .../codemod/commands/ensure_import_present.py | 1 - .../codemod/commands/fix_pyre_directives.py | 1 - libcst/codemod/commands/noop.py | 1 - .../codemod/commands/remove_pyre_directive.py | 1 - .../codemod/commands/remove_unused_imports.py | 1 - .../commands/strip_strings_from_types.py | 1 - libcst/codemod/commands/tests/__init__.py | 1 - .../commands/tests/test_add_pyre_directive.py | 1 - .../tests/test_convert_format_to_fstring.py | 1 - .../test_convert_namedtuple_to_dataclass.py | 1 - .../test_convert_percent_format_to_fstring.py | 1 - .../tests/test_ensure_import_present.py | 1 - .../tests/test_fix_pyre_directives.py | 1 - libcst/codemod/commands/tests/test_noop.py | 1 - .../tests/test_remove_pyre_directive.py | 1 - .../tests/test_remove_unused_imports.py | 1 - .../tests/test_strip_strings_from_types.py | 1 - .../tests/test_unnecessary_format_string.py | 1 - .../commands/unnecessary_format_string.py | 1 - libcst/codemod/tests/__init__.py | 1 - libcst/codemod/tests/test_cli.py | 1 - libcst/codemod/tests/test_codemod.py | 1 - libcst/codemod/tests/test_codemod_cli.py | 1 - libcst/codemod/tests/test_metadata.py | 1 - libcst/codemod/tests/test_runner.py | 1 - libcst/codemod/visitors/__init__.py | 1 - libcst/codemod/visitors/_add_imports.py | 1 - .../visitors/_apply_type_annotations.py | 1 - libcst/codemod/visitors/_gather_exports.py | 1 - libcst/codemod/visitors/_gather_imports.py | 1 - 
libcst/codemod/visitors/_remove_imports.py | 1 - libcst/codemod/visitors/tests/__init__.py | 1 - .../visitors/tests/test_add_imports.py | 1 - .../tests/test_apply_type_annotations.py | 1 - .../visitors/tests/test_gather_exports.py | 1 - .../visitors/tests/test_gather_imports.py | 1 - .../visitors/tests/test_remove_imports.py | 1 - libcst/helpers/__init__.py | 1 - libcst/helpers/_statement.py | 1 - libcst/helpers/_template.py | 1 - libcst/helpers/common.py | 1 - libcst/helpers/expression.py | 1 - libcst/helpers/module.py | 1 - libcst/helpers/tests/test_expression.py | 1 - libcst/helpers/tests/test_module.py | 1 - libcst/helpers/tests/test_statement.py | 1 - libcst/helpers/tests/test_template.py | 1 - libcst/matchers/__init__.py | 1 - libcst/matchers/_decorators.py | 1 - libcst/matchers/_matcher_base.py | 1 - libcst/matchers/_return_types.py | 1 - libcst/matchers/_visitors.py | 1 - libcst/matchers/tests/test_decorators.py | 1 - libcst/matchers/tests/test_extract.py | 1 - libcst/matchers/tests/test_findall.py | 1 - libcst/matchers/tests/test_matchers.py | 1 - .../tests/test_matchers_with_metadata.py | 1 - libcst/matchers/tests/test_replace.py | 1 - libcst/matchers/tests/test_visitors.py | 1 - libcst/metadata/__init__.py | 1 - libcst/metadata/base_provider.py | 1 - .../metadata/expression_context_provider.py | 1 - libcst/metadata/full_repo_manager.py | 2 - libcst/metadata/parent_node_provider.py | 1 - libcst/metadata/position_provider.py | 1 - libcst/metadata/reentrant_codegen.py | 1 - libcst/metadata/scope_provider.py | 1 - libcst/metadata/span_provider.py | 1 - libcst/metadata/tests/__init__.py | 2 - libcst/metadata/tests/test_base_provider.py | 1 - .../tests/test_expression_context_provider.py | 1 - .../metadata/tests/test_full_repo_manager.py | 1 - .../metadata/tests/test_metadata_provider.py | 1 - .../metadata/tests/test_metadata_wrapper.py | 1 - .../tests/test_parent_node_provider.py | 1 - .../metadata/tests/test_position_provider.py | 1 - .../tests/test_qualified_name_provider.py | 1 - .../metadata/tests/test_reentrant_codegen.py | 1 - libcst/metadata/tests/test_scope_provider.py | 1 - libcst/metadata/tests/test_span_provider.py | 1 - .../tests/test_type_inference_provider.py | 1 - libcst/metadata/type_inference_provider.py | 1 - libcst/metadata/wrapper.py | 1 - libcst/testing/utils.py | 1 + libcst/tests/pyre/simple_class.json | 156 +++++++++--------- libcst/tests/pyre/simple_class.py | 1 - libcst/tests/test_batched_visitor.py | 1 - libcst/tests/test_exceptions.py | 1 - libcst/tests/test_fuzz.py | 1 - libcst/tests/test_pyre_integration.py | 3 +- libcst/tests/test_tool.py | 1 - libcst/tests/test_visitor.py | 1 - libcst/tool.py | 1 - stubs/tokenize.pyi | 2 +- 224 files changed, 110 insertions(+), 291 deletions(-) diff --git a/.circleci/.pyre_configuration b/.circleci/.pyre_configuration index c0d9d22e..7ffbe4d8 100644 --- a/.circleci/.pyre_configuration +++ b/.circleci/.pyre_configuration @@ -7,5 +7,6 @@ ], "exclude": [ ".*/\\.tox/.*" - ] + ], + "strict": true } diff --git a/.pyre_configuration.example b/.pyre_configuration.example index d6331d02..0550aae6 100644 --- a/.pyre_configuration.example +++ b/.pyre_configuration.example @@ -8,4 +8,5 @@ "exclude": [ ".*/\\.tox/.*" ] + "strict": true } diff --git a/docs/source/conf.py b/docs/source/conf.py index 1cb942e4..07c7839d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
+# pyre-unsafe # -*- coding: utf-8 -*- # diff --git a/libcst/__init__.py b/libcst/__init__.py index 2dd3ce0f..39b0f6dc 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from libcst._batched_visitor import BatchableCSTVisitor, visit_batched from libcst._exceptions import MetadataException, ParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel diff --git a/libcst/_add_slots.py b/libcst/_add_slots.py index 2df0566a..706f5d10 100644 --- a/libcst/_add_slots.py +++ b/libcst/_add_slots.py @@ -2,7 +2,6 @@ # https://github.com/ericvsmith/dataclasses/blob/ae712dd993420d43444f188f452/LICENSE.txt # https://github.com/ericvsmith/dataclasses/blob/ae712dd993420d43444f/dataclass_tools.py -# pyre-strict import dataclasses from typing import Any, Mapping, Type, TypeVar diff --git a/libcst/_batched_visitor.py b/libcst/_batched_visitor.py index 81660efc..9264c4c9 100644 --- a/libcst/_batched_visitor.py +++ b/libcst/_batched_visitor.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import inspect from typing import ( TYPE_CHECKING, diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index 899e575a..354f6715 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from enum import Enum, auto from typing import Any, Callable, Iterable, Optional, Sequence, Tuple, Union diff --git a/libcst/_maybe_sentinel.py b/libcst/_maybe_sentinel.py index 8209dd6a..dc968f95 100644 --- a/libcst/_maybe_sentinel.py +++ b/libcst/_maybe_sentinel.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from enum import Enum, auto diff --git a/libcst/_metadata_dependent.py b/libcst/_metadata_dependent.py index 600fb3b3..c1627713 100644 --- a/libcst/_metadata_dependent.py +++ b/libcst/_metadata_dependent.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import inspect from abc import ABC from contextlib import contextmanager diff --git a/libcst/_nodes/__init__.py b/libcst/_nodes/__init__.py index 3af7950f..01f1f091 100644 --- a/libcst/_nodes/__init__.py +++ b/libcst/_nodes/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ This package contains CSTNode and all of the subclasses needed to express Python's full diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 6478bf2e..14c0bf39 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -449,6 +449,7 @@ class CSTNode(ABC): return "\n".join(lines) @classmethod + # pyre-fixme[3]: Return annotation cannot be `Any`. 
def field(cls, *args: object, **kwargs: object) -> Any: """ A helper that allows us to easily use CSTNodes in dataclass constructor diff --git a/libcst/_nodes/deep_equals.py b/libcst/_nodes/deep_equals.py index 9424bbf5..1e18227c 100644 --- a/libcst/_nodes/deep_equals.py +++ b/libcst/_nodes/deep_equals.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Provides the implementation of `CSTNode.deep_equals`. """ diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 79e6811d..b58ec233 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import re from abc import ABC, abstractmethod @@ -427,6 +426,7 @@ class Integer(BaseNumber): def _validate(self) -> None: super(Integer, self)._validate() + # pyre-fixme[16]: Module `tokenize` has no attribute `Intnumber`. if not re.fullmatch(INTNUMBER_RE, self.value): raise CSTValidationError("Number is not a valid integer.") @@ -465,6 +465,7 @@ class Float(BaseNumber): def _validate(self) -> None: super(Float, self)._validate() + # pyre-fixme[16]: Module `tokenize` has no attribute `Floatnumber`. if not re.fullmatch(FLOATNUMBER_RE, self.value): raise CSTValidationError("Number is not a valid float.") @@ -502,6 +503,7 @@ class Imaginary(BaseNumber): def _validate(self) -> None: super(Imaginary, self)._validate() + # pyre-fixme[16]: Module `tokenize` has no attribute `Imagnumber`. if not re.fullmatch(IMAGNUMBER_RE, self.value): raise CSTValidationError("Number is not a valid imaginary.") diff --git a/libcst/_nodes/internal.py b/libcst/_nodes/internal.py index 7df20b47..4b5c7b00 100644 --- a/libcst/_nodes/internal.py +++ b/libcst/_nodes/internal.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from contextlib import contextmanager from dataclasses import dataclass, field diff --git a/libcst/_nodes/module.py b/libcst/_nodes/module.py index 400d2f6b..59a4507d 100644 --- a/libcst/_nodes/module.py +++ b/libcst/_nodes/module.py @@ -80,6 +80,7 @@ class Module(CSTNode): has_trailing_newline=self.has_trailing_newline, ) + # pyre-fixme[14]: `visit` overrides method defined in `CSTNode` inconsistently. def visit(self: _ModuleSelfT, visitor: CSTVisitorT) -> _ModuleSelfT: """ Returns the result of running a visitor over this module. diff --git a/libcst/_nodes/op.py b/libcst/_nodes/op.py index 02b14172..8e927c18 100644 --- a/libcst/_nodes/op.py +++ b/libcst/_nodes/op.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from abc import ABC, abstractmethod from dataclasses import dataclass from typing import Tuple diff --git a/libcst/_nodes/tests/base.py b/libcst/_nodes/tests/base.py index fd750b9d..1a014442 100644 --- a/libcst/_nodes/tests/base.py +++ b/libcst/_nodes/tests/base.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict import dataclasses from contextlib import ExitStack from dataclasses import dataclass diff --git a/libcst/_nodes/tests/test_assert.py b/libcst/_nodes/tests/test_assert.py index 4e05128c..4af2e53a 100644 --- a/libcst/_nodes/tests/test_assert.py +++ b/libcst/_nodes/tests/test_assert.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any diff --git a/libcst/_nodes/tests/test_assign.py b/libcst/_nodes/tests/test_assign.py index 52c13f6a..aad6f979 100644 --- a/libcst/_nodes/tests/test_assign.py +++ b/libcst/_nodes/tests/test_assign.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index 0fc2fdf6..a35790eb 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any diff --git a/libcst/_nodes/tests/test_attribute.py b/libcst/_nodes/tests/test_attribute.py index b2140dfa..e3c021e3 100644 --- a/libcst/_nodes/tests/test_attribute.py +++ b/libcst/_nodes/tests/test_attribute.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_await.py b/libcst/_nodes/tests/test_await.py index 16be6764..a07f2c62 100644 --- a/libcst/_nodes/tests/test_await.py +++ b/libcst/_nodes/tests/test_await.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_binary_op.py b/libcst/_nodes/tests/test_binary_op.py index edaa3588..50f8ff79 100644 --- a/libcst/_nodes/tests/test_binary_op.py +++ b/libcst/_nodes/tests/test_binary_op.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_boolean_op.py b/libcst/_nodes/tests/test_boolean_op.py index 5506ec16..bf63a49d 100644 --- a/libcst/_nodes/tests/test_boolean_op.py +++ b/libcst/_nodes/tests/test_boolean_op.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_call.py b/libcst/_nodes/tests/test_call.py index a5325642..c58af996 100644 --- a/libcst/_nodes/tests/test_call.py +++ b/libcst/_nodes/tests/test_call.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_classdef.py b/libcst/_nodes/tests/test_classdef.py index fe347283..1f0c4090 100644 --- a/libcst/_nodes/tests/test_classdef.py +++ b/libcst/_nodes/tests/test_classdef.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_comment.py b/libcst/_nodes/tests/test_comment.py index 46b892b5..79393765 100644 --- a/libcst/_nodes/tests/test_comment.py +++ b/libcst/_nodes/tests/test_comment.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_comparison.py b/libcst/_nodes/tests/test_comparison.py index 1148afdf..b70dab1e 100644 --- a/libcst/_nodes/tests/test_comparison.py +++ b/libcst/_nodes/tests/test_comparison.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_cst_node.py b/libcst/_nodes/tests/test_cst_node.py index bbe8c53b..e3cb7e9a 100644 --- a/libcst/_nodes/tests/test_cst_node.py +++ b/libcst/_nodes/tests/test_cst_node.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from textwrap import dedent from typing import Union diff --git a/libcst/_nodes/tests/test_del.py b/libcst/_nodes/tests/test_del.py index dd76be26..83d97cb4 100644 --- a/libcst/_nodes/tests/test_del.py +++ b/libcst/_nodes/tests/test_del.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_dict.py b/libcst/_nodes/tests/test_dict.py index 0d529289..425adb79 100644 --- a/libcst/_nodes/tests/test_dict.py +++ b/libcst/_nodes/tests/test_dict.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_dict_comp.py b/libcst/_nodes/tests/test_dict_comp.py index dd5d219b..829cb96f 100644 --- a/libcst/_nodes/tests/test_dict_comp.py +++ b/libcst/_nodes/tests/test_dict_comp.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_docstring.py b/libcst/_nodes/tests/test_docstring.py index 8596a952..0f46f2f6 100644 --- a/libcst/_nodes/tests/test_docstring.py +++ b/libcst/_nodes/tests/test_docstring.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from textwrap import dedent from typing import Optional diff --git a/libcst/_nodes/tests/test_else.py b/libcst/_nodes/tests/test_else.py index cbeb0a5a..ffaad752 100644 --- a/libcst/_nodes/tests/test_else.py +++ b/libcst/_nodes/tests/test_else.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_empty_line.py b/libcst/_nodes/tests/test_empty_line.py index b9b06852..ad4c647c 100644 --- a/libcst/_nodes/tests/test_empty_line.py +++ b/libcst/_nodes/tests/test_empty_line.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import libcst as cst from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock from libcst.testing.utils import data_provider diff --git a/libcst/_nodes/tests/test_for.py b/libcst/_nodes/tests/test_for.py index 321cc3e6..ffc91b48 100644 --- a/libcst/_nodes/tests/test_for.py +++ b/libcst/_nodes/tests/test_for.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index e05d1856..472d30aa 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_global.py b/libcst/_nodes/tests/test_global.py index 7df81739..038c0368 100644 --- a/libcst/_nodes/tests/test_global.py +++ b/libcst/_nodes/tests/test_global.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_if.py b/libcst/_nodes/tests/test_if.py index 3ccf0c1c..52dffa33 100644 --- a/libcst/_nodes/tests/test_if.py +++ b/libcst/_nodes/tests/test_if.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_ifexp.py b/libcst/_nodes/tests/test_ifexp.py index 41bcade1..ef65b9d5 100644 --- a/libcst/_nodes/tests/test_ifexp.py +++ b/libcst/_nodes/tests/test_ifexp.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_import.py b/libcst/_nodes/tests/test_import.py index 07a604a6..cd56da51 100644 --- a/libcst/_nodes/tests/test_import.py +++ b/libcst/_nodes/tests/test_import.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_indented_block.py b/libcst/_nodes/tests/test_indented_block.py index 79ca580e..4397c864 100644 --- a/libcst/_nodes/tests/test_indented_block.py +++ b/libcst/_nodes/tests/test_indented_block.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_lambda.py b/libcst/_nodes/tests/test_lambda.py index 1c7ff5d6..5ff75f1d 100644 --- a/libcst/_nodes/tests/test_lambda.py +++ b/libcst/_nodes/tests/test_lambda.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_leaf_small_statements.py b/libcst/_nodes/tests/test_leaf_small_statements.py index 22144934..9ab3e499 100644 --- a/libcst/_nodes/tests/test_leaf_small_statements.py +++ b/libcst/_nodes/tests/test_leaf_small_statements.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import libcst as cst from libcst._nodes.tests.base import CSTNodeTest from libcst.testing.utils import data_provider diff --git a/libcst/_nodes/tests/test_list.py b/libcst/_nodes/tests/test_list.py index c050af8a..a4a08b95 100644 --- a/libcst/_nodes/tests/test_list.py +++ b/libcst/_nodes/tests/test_list.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_matrix_multiply.py b/libcst/_nodes/tests/test_matrix_multiply.py index 6a98bee9..9f50dd28 100644 --- a/libcst/_nodes/tests/test_matrix_multiply.py +++ b/libcst/_nodes/tests/test_matrix_multiply.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_module.py b/libcst/_nodes/tests/test_module.py index 05bd9e40..671a23a6 100644 --- a/libcst/_nodes/tests/test_module.py +++ b/libcst/_nodes/tests/test_module.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Tuple, cast import libcst as cst diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index 34937e45..b0eccbf1 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_newline.py b/libcst/_nodes/tests/test_newline.py index b0f1239a..ab5935bf 100644 --- a/libcst/_nodes/tests/test_newline.py +++ b/libcst/_nodes/tests/test_newline.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from typing import Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_nonlocal.py b/libcst/_nodes/tests/test_nonlocal.py index 634caccb..86b65301 100644 --- a/libcst/_nodes/tests/test_nonlocal.py +++ b/libcst/_nodes/tests/test_nonlocal.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_number.py b/libcst/_nodes/tests/test_number.py index 9d4db69d..517a97c5 100644 --- a/libcst/_nodes/tests/test_number.py +++ b/libcst/_nodes/tests/test_number.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_raise.py b/libcst/_nodes/tests/test_raise.py index 14c58755..7113357e 100644 --- a/libcst/_nodes/tests/test_raise.py +++ b/libcst/_nodes/tests/test_raise.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_removal_behavior.py b/libcst/_nodes/tests/test_removal_behavior.py index 23efbcfc..aeb9745f 100644 --- a/libcst/_nodes/tests/test_removal_behavior.py +++ b/libcst/_nodes/tests/test_removal_behavior.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Type, Union import libcst as cst diff --git a/libcst/_nodes/tests/test_return.py b/libcst/_nodes/tests/test_return.py index e3402086..75aa587a 100644 --- a/libcst/_nodes/tests/test_return.py +++ b/libcst/_nodes/tests/test_return.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_set.py b/libcst/_nodes/tests/test_set.py index 4f95b9fd..434bf0ab 100644 --- a/libcst/_nodes/tests/test_set.py +++ b/libcst/_nodes/tests/test_set.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_simple_comp.py b/libcst/_nodes/tests/test_simple_comp.py index 613d71d8..cf73176b 100644 --- a/libcst/_nodes/tests/test_simple_comp.py +++ b/libcst/_nodes/tests/test_simple_comp.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_simple_statement.py b/libcst/_nodes/tests/test_simple_statement.py index 74e556a0..5870e56e 100644 --- a/libcst/_nodes/tests/test_simple_statement.py +++ b/libcst/_nodes/tests/test_simple_statement.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_simple_whitespace.py b/libcst/_nodes/tests/test_simple_whitespace.py index 71569565..b571d7f2 100644 --- a/libcst/_nodes/tests/test_simple_whitespace.py +++ b/libcst/_nodes/tests/test_simple_whitespace.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_small_statement.py b/libcst/_nodes/tests/test_small_statement.py index 2781d3df..73e5296f 100644 --- a/libcst/_nodes/tests/test_small_statement.py +++ b/libcst/_nodes/tests/test_small_statement.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_subscript.py b/libcst/_nodes/tests/test_subscript.py index 6e074931..7e39eb5e 100644 --- a/libcst/_nodes/tests/test_subscript.py +++ b/libcst/_nodes/tests/test_subscript.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_trailing_whitespace.py b/libcst/_nodes/tests/test_trailing_whitespace.py index 6c7e7c64..d11d553e 100644 --- a/libcst/_nodes/tests/test_trailing_whitespace.py +++ b/libcst/_nodes/tests/test_trailing_whitespace.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import libcst as cst from libcst._nodes.tests.base import CSTNodeTest from libcst.testing.utils import data_provider diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index 5e0d0f97..b9492a7a 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_tuple.py b/libcst/_nodes/tests/test_tuple.py index 71b290f9..f3a49bed 100644 --- a/libcst/_nodes/tests/test_tuple.py +++ b/libcst/_nodes/tests/test_tuple.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable import libcst as cst diff --git a/libcst/_nodes/tests/test_unary_op.py b/libcst/_nodes/tests/test_unary_op.py index 9bef5fed..c9dbb53b 100644 --- a/libcst/_nodes/tests/test_unary_op.py +++ b/libcst/_nodes/tests/test_unary_op.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable, Optional import libcst as cst diff --git a/libcst/_nodes/tests/test_while.py b/libcst/_nodes/tests/test_while.py index a0682ed0..1bdc8976 100644 --- a/libcst/_nodes/tests/test_while.py +++ b/libcst/_nodes/tests/test_while.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index c431593f..b74487c7 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any import libcst as cst diff --git a/libcst/_nodes/tests/test_yield.py b/libcst/_nodes/tests/test_yield.py index 1dfb75d8..83263beb 100644 --- a/libcst/_nodes/tests/test_yield.py +++ b/libcst/_nodes/tests/test_yield.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Any, Callable, Optional import libcst as cst diff --git a/libcst/_nodes/whitespace.py b/libcst/_nodes/whitespace.py index b9bc412c..22182ebe 100644 --- a/libcst/_nodes/whitespace.py +++ b/libcst/_nodes/whitespace.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import re from abc import ABC, abstractmethod diff --git a/libcst/_parser/__init__.py b/libcst/_parser/__init__.py index 09af7bcf..62642369 100644 --- a/libcst/_parser/__init__.py +++ b/libcst/_parser/__init__.py @@ -2,5 +2,3 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. - -# pyre-strict diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index 4c5cd85c..dc7f75ee 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -22,7 +22,6 @@ # As a consequence parser may only be used once. # - Supports our custom Token class, instead of `parso.python.tokenize.Token`. -# pyre-strict from dataclasses import dataclass, field from typing import Generic, Iterable, List, Sequence, TypeVar, Union diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index afd878fb..e7c76a92 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +# pyre-unsafe import re import typing @@ -861,10 +862,13 @@ def convert_atom_basic( elif child.type.name == "NUMBER": # We must determine what type of number it is since we split node # types up this way. + # pyre-fixme[16]: Module `tokenize` has no attribute `Intnumber`. if re.fullmatch(INTNUMBER_RE, child.string): return WithLeadingWhitespace(Integer(child.string), child.whitespace_before) + # pyre-fixme[16]: Module `tokenize` has no attribute `Floatnumber`. elif re.fullmatch(FLOATNUMBER_RE, child.string): return WithLeadingWhitespace(Float(child.string), child.whitespace_before) + # pyre-fixme[16]: Module `tokenize` has no attribute `Imagnumber`. elif re.fullmatch(IMAGNUMBER_RE, child.string): return WithLeadingWhitespace( Imaginary(child.string), child.whitespace_before diff --git a/libcst/_parser/conversions/module.py b/libcst/_parser/conversions/module.py index 17d4c467..ad3bed66 100644 --- a/libcst/_parser/conversions/module.py +++ b/libcst/_parser/conversions/module.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
+# pyre-unsafe from typing import Any, Sequence diff --git a/libcst/_parser/conversions/params.py b/libcst/_parser/conversions/params.py index 1153790e..597e7ac9 100644 --- a/libcst/_parser/conversions/params.py +++ b/libcst/_parser/conversions/params.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +# pyre-unsafe from typing import Any, List, Optional, Sequence, Union diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index 2cc5c5a4..8ff7ac8f 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +# pyre-unsafe from typing import Any, Dict, List, Optional, Sequence, Tuple, Type diff --git a/libcst/_parser/conversions/terminals.py b/libcst/_parser/conversions/terminals.py index 4eb85018..96d9391b 100644 --- a/libcst/_parser/conversions/terminals.py +++ b/libcst/_parser/conversions/terminals.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +# pyre-unsafe from typing import Any diff --git a/libcst/_parser/custom_itertools.py b/libcst/_parser/custom_itertools.py index 71541cc9..ccbb1a1f 100644 --- a/libcst/_parser/custom_itertools.py +++ b/libcst/_parser/custom_itertools.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from itertools import zip_longest from typing import Iterable, Iterator, TypeVar diff --git a/libcst/_parser/detect_config.py b/libcst/_parser/detect_config.py index b4ce02f2..0748762e 100644 --- a/libcst/_parser/detect_config.py +++ b/libcst/_parser/detect_config.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import itertools import re @@ -48,6 +47,7 @@ def _detect_encoding(source: Union[str, bytes]) -> str: if isinstance(source, str): return "utf-8" + # pyre-fixme[16]: Module `tokenize` has no attribute `detect_encoding`. return py_tokenize_detect_encoding(BytesIO(source).readline)[0] diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index 6e0b5493..1a27e976 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Parser entrypoints define the way users of our API are allowed to interact with the parser. A parser entrypoint should take the source code and some configuration diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index 2123f738..631714e3 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict import re from functools import lru_cache from typing import FrozenSet, Iterator, Mapping, Optional, Tuple, Union diff --git a/libcst/_parser/parso/pgen2/generator.py b/libcst/_parser/parso/pgen2/generator.py index 79c47759..c08b164f 100644 --- a/libcst/_parser/parso/pgen2/generator.py +++ b/libcst/_parser/parso/pgen2/generator.py @@ -11,6 +11,7 @@ # # The following changes were made: # - Type stubs were directly applied. +# pyre-unsafe """ This module defines the data structures used to represent a grammar. diff --git a/libcst/_parser/parso/pgen2/grammar_parser.py b/libcst/_parser/parso/pgen2/grammar_parser.py index 2a6dbac8..0d30199d 100644 --- a/libcst/_parser/parso/pgen2/grammar_parser.py +++ b/libcst/_parser/parso/pgen2/grammar_parser.py @@ -11,6 +11,7 @@ # # The following changes were made: # - Type stubs were directly applied. +# pyre-unsafe from typing import Generator, List, Optional, Tuple diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index fe974e99..204ce94d 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -12,8 +12,8 @@ # The following changes were made: # - Explicit TokenType references instead of dynamic creation. # - Use dataclasses instead of raw classes. +# pyre-unsafe -# pyre-strict from dataclasses import dataclass diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 80d54e3c..4b1905cb 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -25,6 +25,7 @@ # Basically this is a stripped down version of the standard library module, so # you can read the documentation there. Additionally we included some speed and # memory optimizations here. +# pyre-unsafe from __future__ import absolute_import import itertools as _itertools diff --git a/libcst/_parser/parso/tests/test_fstring.py b/libcst/_parser/parso/tests/test_fstring.py index 304bd10a..6851e8b1 100644 --- a/libcst/_parser/parso/tests/test_fstring.py +++ b/libcst/_parser/parso/tests/test_fstring.py @@ -12,6 +12,7 @@ # The following changes were made: # - Convert base test to Unittet # - Remove grammar-specific tests +# pyre-unsafe from libcst._parser.parso.python.tokenize import tokenize from libcst._parser.parso.utils import parse_version_string from libcst.testing.utils import UnitTest, data_provider diff --git a/libcst/_parser/parso/tests/test_tokenize.py b/libcst/_parser/parso/tests/test_tokenize.py index 02d96d91..f2c62d33 100644 --- a/libcst/_parser/parso/tests/test_tokenize.py +++ b/libcst/_parser/parso/tests/test_tokenize.py @@ -12,6 +12,7 @@ # The following changes were made: # - Convert base test to Unittet # - Remove grammar-specific tests +# pyre-unsafe # -*- coding: utf-8 # This file contains Unicode characters. 
 from textwrap import dedent
diff --git a/libcst/_parser/parso/tests/test_utils.py b/libcst/_parser/parso/tests/test_utils.py
index df14609f..03ba9a68 100644
--- a/libcst/_parser/parso/tests/test_utils.py
+++ b/libcst/_parser/parso/tests/test_utils.py
@@ -12,6 +12,7 @@
 # The following changes were made:
 # - Convert base test to Unittet
 # - Remove grammar-specific tests
+# pyre-unsafe
 from libcst._parser.parso.utils import python_bytes_to_unicode, split_lines
 from libcst.testing.utils import UnitTest, data_provider
 
diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py
index 6cf233c4..a6388040 100644
--- a/libcst/_parser/parso/utils.py
+++ b/libcst/_parser/parso/utils.py
@@ -16,7 +16,6 @@
 # - Make PythonVersionInfo directly usable in hashmaps
 # - Unroll total ordering because Pyre doesn't understand it
 
-# pyre-strict
 import re
 import sys
 
diff --git a/libcst/_parser/production_decorator.py b/libcst/_parser/production_decorator.py
index 804ce0c8..c982bc8d 100644
--- a/libcst/_parser/production_decorator.py
+++ b/libcst/_parser/production_decorator.py
@@ -3,7 +3,6 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
-# pyre-strict
 from typing import Callable, Optional, Sequence, TypeVar
 
 from libcst._parser.types.conversions import NonterminalConversion
diff --git a/libcst/_parser/python_parser.py b/libcst/_parser/python_parser.py
index 3a05072b..05ea0b57 100644
--- a/libcst/_parser/python_parser.py
+++ b/libcst/_parser/python_parser.py
@@ -2,6 +2,7 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
+# pyre-unsafe
 
 from typing import Any, Iterable, Mapping, Sequence
 
diff --git a/libcst/_parser/tests/test_detect_config.py b/libcst/_parser/tests/test_detect_config.py
index aa6024c6..a46106dc 100644
--- a/libcst/_parser/tests/test_detect_config.py
+++ b/libcst/_parser/tests/test_detect_config.py
@@ -3,7 +3,6 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
-# pyre-strict
 import dataclasses
 from typing import Union
 
diff --git a/libcst/_parser/tests/test_footer_behavior.py b/libcst/_parser/tests/test_footer_behavior.py
index 9139fb0c..23ff4e25 100644
--- a/libcst/_parser/tests/test_footer_behavior.py
+++ b/libcst/_parser/tests/test_footer_behavior.py
@@ -3,7 +3,6 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
-# pyre-strict
 from textwrap import dedent
 
 import libcst as cst
diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py
index df204d11..6d651f3b 100644
--- a/libcst/_parser/tests/test_parse_errors.py
+++ b/libcst/_parser/tests/test_parse_errors.py
@@ -3,7 +3,6 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
-# pyre-strict
 from textwrap import dedent
 from typing import Callable
 
diff --git a/libcst/_parser/tests/test_whitespace_parser.py b/libcst/_parser/tests/test_whitespace_parser.py
index d1a4bafa..dcbafa7e 100644
--- a/libcst/_parser/tests/test_whitespace_parser.py
+++ b/libcst/_parser/tests/test_whitespace_parser.py
@@ -3,7 +3,6 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
-# pyre-strict from dataclasses import dataclass from typing import Callable, Sequence, TypeVar diff --git a/libcst/_parser/tests/test_wrapped_tokenize.py b/libcst/_parser/tests/test_wrapped_tokenize.py index 2bd77100..56bf3dbd 100644 --- a/libcst/_parser/tests/test_wrapped_tokenize.py +++ b/libcst/_parser/tests/test_wrapped_tokenize.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +# pyre-unsafe from typing import Sequence diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index 5a417f91..7c76e4c7 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import abc import codecs diff --git a/libcst/_parser/types/conversions.py b/libcst/_parser/types/conversions.py index e0f38dc8..d0193624 100644 --- a/libcst/_parser/types/conversions.py +++ b/libcst/_parser/types/conversions.py @@ -9,5 +9,7 @@ from libcst._parser.types.config import ParserConfig from libcst._parser.types.token import Token +# pyre-fixme[33]: Aliased annotation cannot contain `Any`. NonterminalConversion = Callable[[ParserConfig, Sequence[Any]], Any] +# pyre-fixme[33]: Aliased annotation cannot contain `Any`. TerminalConversion = Callable[[ParserConfig, Token], Any] diff --git a/libcst/_parser/types/partials.py b/libcst/_parser/types/partials.py index ef8377c5..a53f3778 100644 --- a/libcst/_parser/types/partials.py +++ b/libcst/_parser/types/partials.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from dataclasses import dataclass from typing import Generic, Optional, Sequence, TypeVar, Union diff --git a/libcst/_parser/types/production.py b/libcst/_parser/types/production.py index ad68d45c..bb60014a 100644 --- a/libcst/_parser/types/production.py +++ b/libcst/_parser/types/production.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from dataclasses import dataclass from typing import Optional diff --git a/libcst/_parser/types/tests/test_config.py b/libcst/_parser/types/tests/test_config.py index 39110bb3..6c0c0d0b 100644 --- a/libcst/_parser/types/tests/test_config.py +++ b/libcst/_parser/types/tests/test_config.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Callable from libcst._parser.types.config import PartialParserConfig diff --git a/libcst/_parser/types/token.py b/libcst/_parser/types/token.py index 054a0461..60ddb2a2 100644 --- a/libcst/_parser/types/token.py +++ b/libcst/_parser/types/token.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from dataclasses import dataclass from typing import Optional, Tuple diff --git a/libcst/_parser/types/whitespace_state.py b/libcst/_parser/types/whitespace_state.py index 4df9a111..b5554a2b 100644 --- a/libcst/_parser/types/whitespace_state.py +++ b/libcst/_parser/types/whitespace_state.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict """ Defines the state object used by the whitespace parser. """ diff --git a/libcst/_parser/whitespace_parser.py b/libcst/_parser/whitespace_parser.py index 35e0ecaa..b9df6c7e 100644 --- a/libcst/_parser/whitespace_parser.py +++ b/libcst/_parser/whitespace_parser.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Parso doesn't attempt to parse (or even emit tokens for) whitespace or comments that isn't syntatically important. Instead, we're just given the whitespace as a "prefix" of diff --git a/libcst/_parser/wrapped_tokenize.py b/libcst/_parser/wrapped_tokenize.py index 78a5eb6e..6104757d 100644 --- a/libcst/_parser/wrapped_tokenize.py +++ b/libcst/_parser/wrapped_tokenize.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Parso's tokenize doesn't give us tokens in the format that we'd ideally like, so this diff --git a/libcst/_position.py b/libcst/_position.py index dcff0cce..82411402 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Data structures used for storing position information. diff --git a/libcst/_removal_sentinel.py b/libcst/_removal_sentinel.py index 3e7f68a5..b8ba8498 100644 --- a/libcst/_removal_sentinel.py +++ b/libcst/_removal_sentinel.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Used by visitors. This is hoisted into a separate module to avoid some circular dependencies in the definition of CSTNode. diff --git a/libcst/_tabs.py b/libcst/_tabs.py index c18ecc74..44e245ba 100644 --- a/libcst/_tabs.py +++ b/libcst/_tabs.py @@ -3,8 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict - def expand_tabs(line: str) -> str: """ diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 3f00976a..06a18a65 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict # This file was generated by libcst.codegen.gen_matcher_classes from typing import TYPE_CHECKING, Optional, Union diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index be48210a..8525b050 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast diff --git a/libcst/_types.py b/libcst/_types.py index fe10ccfc..98342da8 100644 --- a/libcst/_types.py +++ b/libcst/_types.py @@ -3,8 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict - from typing import TYPE_CHECKING, TypeVar diff --git a/libcst/_visitors.py b/libcst/_visitors.py index b415bd13..1d710ff2 100644 --- a/libcst/_visitors.py +++ b/libcst/_visitors.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import TYPE_CHECKING, Union from libcst._metadata_dependent import MetadataDependent diff --git a/libcst/codegen/gather.py b/libcst/codegen/gather.py index 59f007d9..fe309b74 100644 --- a/libcst/codegen/gather.py +++ b/libcst/codegen/gather.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import inspect from collections import defaultdict from collections.abc import Sequence as ABCSequence diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 0c6a9584..cfde98a3 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from dataclasses import dataclass, fields from typing import Generator, List, Optional, Sequence, Set, Tuple, Type, Union @@ -455,7 +454,6 @@ generated_code.append( ) generated_code.append("# LICENSE file in the root directory of this source tree.") generated_code.append("") -generated_code.append("# pyre-strict") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from abc import ABC") diff --git a/libcst/codegen/gen_type_mapping.py b/libcst/codegen/gen_type_mapping.py index 7d39581a..2f6b2a9d 100644 --- a/libcst/codegen/gen_type_mapping.py +++ b/libcst/codegen/gen_type_mapping.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import List from libcst.codegen.gather import imports, nodebases, nodeuses @@ -17,7 +16,6 @@ generated_code.append( ) generated_code.append("# LICENSE file in the root directory of this source tree.") generated_code.append("") -generated_code.append("# pyre-strict") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_type_mapping") generated_code.append("from typing import Dict as TypingDict, Type, Union") diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index 2385eb29..8eec1bc0 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from dataclasses import fields from typing import List @@ -18,7 +17,6 @@ generated_code.append( ) generated_code.append("# LICENSE file in the root directory of this source tree.") generated_code.append("") -generated_code.append("# pyre-strict") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from typing import Optional, Union, TYPE_CHECKING") diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index c20123ef..c6bf67b3 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -8,7 +8,6 @@ # python -m libcst.codegen.generate --help # python -m libcst.codegen.generate visitors -# pyre-strict import argparse import os import os.path diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index 7bfe037b..dad5166e 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import os import os.path diff --git a/libcst/codegen/transforms.py b/libcst/codegen/transforms.py index afa49be1..61bbddf5 100644 --- a/libcst/codegen/transforms.py +++ b/libcst/codegen/transforms.py @@ -12,7 +12,6 @@ # use features such as matchers which rely on previously generated # code to function. -# pyre-strict import ast import libcst as cst diff --git a/libcst/codemod/__init__.py b/libcst/codemod/__init__.py index e43eb63a..b2b2feab 100644 --- a/libcst/codemod/__init__.py +++ b/libcst/codemod/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod._cli import ( ParallelTransformResult, diff_code, diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 67355a19..b9ffd280 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict """ Provides helpers for CLI interaction. """ diff --git a/libcst/codemod/_codemod.py b/libcst/codemod/_codemod.py index fb66d1a7..bae27674 100644 --- a/libcst/codemod/_codemod.py +++ b/libcst/codemod/_codemod.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from abc import ABC, abstractmethod from contextlib import contextmanager from dataclasses import replace diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index f9374b03..4529be00 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import argparse import inspect from abc import ABC, abstractmethod diff --git a/libcst/codemod/_context.py b/libcst/codemod/_context.py index 2ed21a39..98e57adf 100644 --- a/libcst/codemod/_context.py +++ b/libcst/codemod/_context.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict from dataclasses import dataclass, field from typing import Any, Dict, List, Optional diff --git a/libcst/codemod/_runner.py b/libcst/codemod/_runner.py index e52dec01..a4b68dd4 100644 --- a/libcst/codemod/_runner.py +++ b/libcst/codemod/_runner.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict """ Provides everything needed to run a CodemodCommand. diff --git a/libcst/codemod/_testing.py b/libcst/codemod/_testing.py index 8442e3eb..75895b10 100644 --- a/libcst/codemod/_testing.py +++ b/libcst/codemod/_testing.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from textwrap import dedent from typing import Optional, Sequence, Type diff --git a/libcst/codemod/_visitor.py b/libcst/codemod/_visitor.py index f66a0645..d368b854 100644 --- a/libcst/codemod/_visitor.py +++ b/libcst/codemod/_visitor.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Mapping import libcst as cst diff --git a/libcst/codemod/commands/__init__.py b/libcst/codemod/commands/__init__.py index 7e2062ff..602d2685 100644 --- a/libcst/codemod/commands/__init__.py +++ b/libcst/codemod/commands/__init__.py @@ -3,4 +3,3 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict diff --git a/libcst/codemod/commands/add_pyre_directive.py b/libcst/codemod/commands/add_pyre_directive.py index 93568899..165ebb0b 100644 --- a/libcst/codemod/commands/add_pyre_directive.py +++ b/libcst/codemod/commands/add_pyre_directive.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import re from abc import ABC from typing import Pattern diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index a7c05028..92431698 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import argparse import ast from typing import Generator, List, Optional, Sequence, Set, Tuple diff --git a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py index 80c6eb52..09935b70 100644 --- a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import List, Optional, Sequence import libcst as cst diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index 2f2a9aa0..f56b055e 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict import re from typing import Callable, cast diff --git a/libcst/codemod/commands/ensure_import_present.py b/libcst/codemod/commands/ensure_import_present.py index 18f6cb84..04b1d129 100644 --- a/libcst/codemod/commands/ensure_import_present.py +++ b/libcst/codemod/commands/ensure_import_present.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import argparse from typing import Generator, Type diff --git a/libcst/codemod/commands/fix_pyre_directives.py b/libcst/codemod/commands/fix_pyre_directives.py index 90e4bbc6..b5310d8b 100644 --- a/libcst/codemod/commands/fix_pyre_directives.py +++ b/libcst/codemod/commands/fix_pyre_directives.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Dict, Sequence, Union import libcst diff --git a/libcst/codemod/commands/noop.py b/libcst/codemod/commands/noop.py index 5876f67c..eef1d897 100644 --- a/libcst/codemod/commands/noop.py +++ b/libcst/codemod/commands/noop.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst import Module from libcst.codemod import CodemodCommand diff --git a/libcst/codemod/commands/remove_pyre_directive.py b/libcst/codemod/commands/remove_pyre_directive.py index f1f05e6e..a9d38506 100644 --- a/libcst/codemod/commands/remove_pyre_directive.py +++ b/libcst/codemod/commands/remove_pyre_directive.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import re from abc import ABC from typing import Pattern, Union diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index 15ebbe53..1c38153f 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst import Import, ImportFrom from libcst.codemod import VisitorBasedCodemodCommand diff --git a/libcst/codemod/commands/strip_strings_from_types.py b/libcst/codemod/commands/strip_strings_from_types.py index 499ebc21..eee89fe4 100644 --- a/libcst/codemod/commands/strip_strings_from_types.py +++ b/libcst/codemod/commands/strip_strings_from_types.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Union import libcst diff --git a/libcst/codemod/commands/tests/__init__.py b/libcst/codemod/commands/tests/__init__.py index 7e2062ff..602d2685 100644 --- a/libcst/codemod/commands/tests/__init__.py +++ b/libcst/codemod/commands/tests/__init__.py @@ -3,4 +3,3 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict diff --git a/libcst/codemod/commands/tests/test_add_pyre_directive.py b/libcst/codemod/commands/tests/test_add_pyre_directive.py index f675a221..b7fad93d 100644 --- a/libcst/codemod/commands/tests/test_add_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_add_pyre_directive.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.add_pyre_directive import AddPyreUnsafeCommand diff --git a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py index c9427aa3..ee1d3cb0 100644 --- a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.convert_format_to_fstring import ConvertFormatStringCommand diff --git a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py index 653d2fd4..675bf58a 100644 --- a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.convert_namedtuple_to_dataclass import ( ConvertNamedTupleToDataclassCommand, diff --git a/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py index 0dd79d19..2e65eac2 100644 --- a/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.convert_percent_format_to_fstring import ( ConvertPercentFormatStringCommand, diff --git a/libcst/codemod/commands/tests/test_ensure_import_present.py b/libcst/codemod/commands/tests/test_ensure_import_present.py index 5c4ed255..b389c3bf 100644 --- a/libcst/codemod/commands/tests/test_ensure_import_present.py +++ b/libcst/codemod/commands/tests/test_ensure_import_present.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.ensure_import_present import EnsureImportPresentCommand diff --git a/libcst/codemod/commands/tests/test_fix_pyre_directives.py b/libcst/codemod/commands/tests/test_fix_pyre_directives.py index e4f6718c..dea57825 100644 --- a/libcst/codemod/commands/tests/test_fix_pyre_directives.py +++ b/libcst/codemod/commands/tests/test_fix_pyre_directives.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.fix_pyre_directives import FixPyreDirectivesCommand diff --git a/libcst/codemod/commands/tests/test_noop.py b/libcst/codemod/commands/tests/test_noop.py index 7b731882..827f3a3c 100644 --- a/libcst/codemod/commands/tests/test_noop.py +++ b/libcst/codemod/commands/tests/test_noop.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.noop import NOOPCommand diff --git a/libcst/codemod/commands/tests/test_remove_pyre_directive.py b/libcst/codemod/commands/tests/test_remove_pyre_directive.py index 01e619e2..06ccb8f8 100644 --- a/libcst/codemod/commands/tests/test_remove_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_remove_pyre_directive.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.remove_pyre_directive import ( RemovePyreStrictCommand, diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index 8c500e0b..c11da4c1 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.remove_unused_imports import RemoveUnusedImportsCommand diff --git a/libcst/codemod/commands/tests/test_strip_strings_from_types.py b/libcst/codemod/commands/tests/test_strip_strings_from_types.py index dce41b7c..3ec292b9 100644 --- a/libcst/codemod/commands/tests/test_strip_strings_from_types.py +++ b/libcst/codemod/commands/tests/test_strip_strings_from_types.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodTest from libcst.codemod.commands.strip_strings_from_types import StripStringsCommand diff --git a/libcst/codemod/commands/tests/test_unnecessary_format_string.py b/libcst/codemod/commands/tests/test_unnecessary_format_string.py index dfa58685..ebf1977a 100644 --- a/libcst/codemod/commands/tests/test_unnecessary_format_string.py +++ b/libcst/codemod/commands/tests/test_unnecessary_format_string.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Type from libcst.codemod import Codemod, CodemodTest diff --git a/libcst/codemod/commands/unnecessary_format_string.py b/libcst/codemod/commands/unnecessary_format_string.py index 3d79d004..a363b43f 100644 --- a/libcst/codemod/commands/unnecessary_format_string.py +++ b/libcst/codemod/commands/unnecessary_format_string.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict import libcst import libcst.matchers as m from libcst.codemod import VisitorBasedCodemodCommand diff --git a/libcst/codemod/tests/__init__.py b/libcst/codemod/tests/__init__.py index 7e2062ff..602d2685 100644 --- a/libcst/codemod/tests/__init__.py +++ b/libcst/codemod/tests/__init__.py @@ -3,4 +3,3 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict diff --git a/libcst/codemod/tests/test_cli.py b/libcst/codemod/tests/test_cli.py index b722118e..a4d1404f 100644 --- a/libcst/codemod/tests/test_cli.py +++ b/libcst/codemod/tests/test_cli.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Optional from libcst.codemod._cli import _calculate_module diff --git a/libcst/codemod/tests/test_codemod.py b/libcst/codemod/tests/test_codemod.py index 10051904..4fb5dbaa 100644 --- a/libcst/codemod/tests/test_codemod.py +++ b/libcst/codemod/tests/test_codemod.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from unittest import expectedFailure import libcst as cst diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index becf29e8..6e6e55f1 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import subprocess diff --git a/libcst/codemod/tests/test_metadata.py b/libcst/codemod/tests/test_metadata.py index f8fef769..acd25bf4 100644 --- a/libcst/codemod/tests/test_metadata.py +++ b/libcst/codemod/tests/test_metadata.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from textwrap import dedent import libcst as cst diff --git a/libcst/codemod/tests/test_runner.py b/libcst/codemod/tests/test_runner.py index de0adbcf..07673055 100644 --- a/libcst/codemod/tests/test_runner.py +++ b/libcst/codemod/tests/test_runner.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from textwrap import dedent from typing import Dict diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index 83deb251..97dbeb99 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor from libcst.codemod.visitors._gather_exports import GatherExportsVisitor diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 929cf886..aa487dd8 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict from collections import defaultdict from typing import Dict, List, Optional, Sequence, Set, Tuple, Union diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 2c9a0f82..9a8db3d7 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree # -# pyre-strict from dataclasses import dataclass, field from typing import Dict, List, Optional, Sequence, Set, Tuple, Union diff --git a/libcst/codemod/visitors/_gather_exports.py b/libcst/codemod/visitors/_gather_exports.py index e49b042f..9b28e25d 100644 --- a/libcst/codemod/visitors/_gather_exports.py +++ b/libcst/codemod/visitors/_gather_exports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Set import libcst diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index e4c94b3b..5920890f 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Dict, List, Sequence, Set, Tuple, Union import libcst diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 841812c4..8c3dc9f8 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst diff --git a/libcst/codemod/visitors/tests/__init__.py b/libcst/codemod/visitors/tests/__init__.py index 7e2062ff..602d2685 100644 --- a/libcst/codemod/visitors/tests/__init__.py +++ b/libcst/codemod/visitors/tests/__init__.py @@ -3,4 +3,3 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 5ca4747d..cfc4b26b 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.codemod import CodemodContext, CodemodTest from libcst.codemod.visitors import AddImportsVisitor diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index e32348a7..08f6aa1b 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict import textwrap from typing import Type diff --git a/libcst/codemod/visitors/tests/test_gather_exports.py b/libcst/codemod/visitors/tests/test_gather_exports.py index d06e45ca..410d77d6 100644 --- a/libcst/codemod/visitors/tests/test_gather_exports.py +++ b/libcst/codemod/visitors/tests/test_gather_exports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst import parse_module from libcst.codemod import CodemodContext, CodemodTest from libcst.codemod.visitors import GatherExportsVisitor diff --git a/libcst/codemod/visitors/tests/test_gather_imports.py b/libcst/codemod/visitors/tests/test_gather_imports.py index 03afd6c6..b1e2c102 100644 --- a/libcst/codemod/visitors/tests/test_gather_imports.py +++ b/libcst/codemod/visitors/tests/test_gather_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst import parse_module from libcst.codemod import CodemodContext, CodemodTest from libcst.codemod.visitors import GatherImportsVisitor diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index 76c751c6..21932b65 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import libcst as cst import libcst.matchers as m from libcst.codemod import CodemodContext, CodemodTest, VisitorBasedCodemodCommand diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index 9b8b00ca..77c4389c 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from libcst.helpers._statement import ( get_absolute_module_for_import, diff --git a/libcst/helpers/_statement.py b/libcst/helpers/_statement.py index 7dbce455..4a3825f2 100644 --- a/libcst/helpers/_statement.py +++ b/libcst/helpers/_statement.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Optional import libcst as cst diff --git a/libcst/helpers/_template.py b/libcst/helpers/_template.py index ecfa2cd7..e3437c1d 100644 --- a/libcst/helpers/_template.py +++ b/libcst/helpers/_template.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Dict, Mapping, Optional, Set, Union diff --git a/libcst/helpers/common.py b/libcst/helpers/common.py index d18b9fb2..5bf7e460 100644 --- a/libcst/helpers/common.py +++ b/libcst/helpers/common.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -# pyre-strict from typing import Type from libcst._types import CSTNodeT diff --git a/libcst/helpers/expression.py b/libcst/helpers/expression.py index fb191872..2a93c509 100644 --- a/libcst/helpers/expression.py +++ b/libcst/helpers/expression.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Optional, Union import libcst as cst diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 044797f9..5f2bddbe 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from itertools import islice from typing import List diff --git a/libcst/helpers/tests/test_expression.py b/libcst/helpers/tests/test_expression.py index 4d1d7d07..ca2d3533 100644 --- a/libcst/helpers/tests/test_expression.py +++ b/libcst/helpers/tests/test_expression.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from ast import literal_eval from typing import Optional, Union diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index fda8befe..c5be94d5 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import libcst from libcst.helpers import insert_header_comments from libcst.testing.utils import UnitTest diff --git a/libcst/helpers/tests/test_statement.py b/libcst/helpers/tests/test_statement.py index 7dfe70ba..8e38f19e 100644 --- a/libcst/helpers/tests/test_statement.py +++ b/libcst/helpers/tests/test_statement.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Optional import libcst as cst diff --git a/libcst/helpers/tests/test_template.py b/libcst/helpers/tests/test_template.py index eb13e1ae..1847728d 100644 --- a/libcst/helpers/tests/test_template.py +++ b/libcst/helpers/tests/test_template.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import os from textwrap import dedent diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index df13b71b..25a4d439 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict # This file was generated by libcst.codegen.gen_matcher_classes from abc import ABC diff --git a/libcst/matchers/_decorators.py b/libcst/matchers/_decorators.py index b341b7d0..7dd2e741 100644 --- a/libcst/matchers/_decorators.py +++ b/libcst/matchers/_decorators.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from typing import Callable, TypeVar from libcst.matchers._matcher_base import BaseMatcherNode diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index e706870e..16c16d5b 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import collections.abc import copy import inspect diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 3dbc1d26..d8a22986 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict # This file was generated by libcst.codegen.gen_type_mapping from typing import Dict as TypingDict, Type, Union diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 6f68300b..1232191a 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from inspect import ismethod, signature from typing import ( Any, diff --git a/libcst/matchers/tests/test_decorators.py b/libcst/matchers/tests/test_decorators.py index 5864f732..c102f2ab 100644 --- a/libcst/matchers/tests/test_decorators.py +++ b/libcst/matchers/tests/test_decorators.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from ast import literal_eval from textwrap import dedent from typing import List, Set diff --git a/libcst/matchers/tests/test_extract.py b/libcst/matchers/tests/test_extract.py index bdae463b..2bf45b91 100644 --- a/libcst/matchers/tests/test_extract.py +++ b/libcst/matchers/tests/test_extract.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Tuple import libcst as cst diff --git a/libcst/matchers/tests/test_findall.py b/libcst/matchers/tests/test_findall.py index bb96a85c..ade99743 100644 --- a/libcst/matchers/tests/test_findall.py +++ b/libcst/matchers/tests/test_findall.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from textwrap import dedent from typing import Optional, Sequence diff --git a/libcst/matchers/tests/test_matchers.py b/libcst/matchers/tests/test_matchers.py index b6ddd763..ab1e5cf1 100644 --- a/libcst/matchers/tests/test_matchers.py +++ b/libcst/matchers/tests/test_matchers.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import libcst as cst import libcst.matchers as m from libcst.matchers import matches diff --git a/libcst/matchers/tests/test_matchers_with_metadata.py b/libcst/matchers/tests/test_matchers_with_metadata.py index 6067b37b..4e859973 100644 --- a/libcst/matchers/tests/test_matchers_with_metadata.py +++ b/libcst/matchers/tests/test_matchers_with_metadata.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from textwrap import dedent from typing import Sequence, Set, Tuple diff --git a/libcst/matchers/tests/test_replace.py b/libcst/matchers/tests/test_replace.py index 75d3282c..cc922446 100644 --- a/libcst/matchers/tests/test_replace.py +++ b/libcst/matchers/tests/test_replace.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict from typing import Dict, Sequence, Union import libcst as cst diff --git a/libcst/matchers/tests/test_visitors.py b/libcst/matchers/tests/test_visitors.py index 3874b2cd..3fc2c658 100644 --- a/libcst/matchers/tests/test_visitors.py +++ b/libcst/matchers/tests/test_visitors.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import pickle from typing import Union diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index e18e62b8..2e70e9df 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from libcst._position import CodePosition, CodeRange from libcst.metadata.base_provider import ( diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index eeaa26ca..b2910f5c 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from pathlib import Path from types import MappingProxyType from typing import ( diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index 52c03d0e..c5a1becc 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from enum import Enum, auto from typing import Optional, Sequence diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index 7bd5d8de..e1bf10a1 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -3,8 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict - from pathlib import Path from typing import TYPE_CHECKING, Collection, Dict, List, Mapping diff --git a/libcst/metadata/parent_node_provider.py b/libcst/metadata/parent_node_provider.py index 2919c943..fa36e94d 100644 --- a/libcst/metadata/parent_node_provider.py +++ b/libcst/metadata/parent_node_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Optional diff --git a/libcst/metadata/position_provider.py b/libcst/metadata/position_provider.py index 3b169754..7bd1e713 100644 --- a/libcst/metadata/position_provider.py +++ b/libcst/metadata/position_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict import re from contextlib import contextmanager diff --git a/libcst/metadata/reentrant_codegen.py b/libcst/metadata/reentrant_codegen.py index 34450cac..89e11c93 100644 --- a/libcst/metadata/reentrant_codegen.py +++ b/libcst/metadata/reentrant_codegen.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from dataclasses import dataclass, field from typing import List, Optional, Sequence diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 302cdd44..e1ef8605 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import abc import builtins diff --git a/libcst/metadata/span_provider.py b/libcst/metadata/span_provider.py index 33002b55..400ec284 100644 --- a/libcst/metadata/span_provider.py +++ b/libcst/metadata/span_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from contextlib import contextmanager from dataclasses import dataclass, field diff --git a/libcst/metadata/tests/__init__.py b/libcst/metadata/tests/__init__.py index 09af7bcf..62642369 100644 --- a/libcst/metadata/tests/__init__.py +++ b/libcst/metadata/tests/__init__.py @@ -2,5 +2,3 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. - -# pyre-strict diff --git a/libcst/metadata/tests/test_base_provider.py b/libcst/metadata/tests/test_base_provider.py index a1c92c51..e19a4571 100644 --- a/libcst/metadata/tests/test_base_provider.py +++ b/libcst/metadata/tests/test_base_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import cast import libcst as cst diff --git a/libcst/metadata/tests/test_expression_context_provider.py b/libcst/metadata/tests/test_expression_context_provider.py index 91c3926b..220b3284 100644 --- a/libcst/metadata/tests/test_expression_context_provider.py +++ b/libcst/metadata/tests/test_expression_context_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Dict, Optional, cast diff --git a/libcst/metadata/tests/test_full_repo_manager.py b/libcst/metadata/tests/test_full_repo_manager.py index 578bad05..9e57c778 100644 --- a/libcst/metadata/tests/test_full_repo_manager.py +++ b/libcst/metadata/tests/test_full_repo_manager.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import json from pathlib import Path from unittest.mock import Mock, patch diff --git a/libcst/metadata/tests/test_metadata_provider.py b/libcst/metadata/tests/test_metadata_provider.py index a0855f34..4f7be175 100644 --- a/libcst/metadata/tests/test_metadata_provider.py +++ b/libcst/metadata/tests/test_metadata_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from unittest.mock import Mock import libcst as cst diff --git a/libcst/metadata/tests/test_metadata_wrapper.py b/libcst/metadata/tests/test_metadata_wrapper.py index fbf46a31..731a910f 100644 --- a/libcst/metadata/tests/test_metadata_wrapper.py +++ b/libcst/metadata/tests/test_metadata_wrapper.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Optional diff --git a/libcst/metadata/tests/test_parent_node_provider.py b/libcst/metadata/tests/test_parent_node_provider.py index 6ab4b506..f5f617f6 100644 --- a/libcst/metadata/tests/test_parent_node_provider.py +++ b/libcst/metadata/tests/test_parent_node_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from textwrap import dedent diff --git a/libcst/metadata/tests/test_position_provider.py b/libcst/metadata/tests/test_position_provider.py index ad3e1c11..07374575 100644 --- a/libcst/metadata/tests/test_position_provider.py +++ b/libcst/metadata/tests/test_position_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import Tuple diff --git a/libcst/metadata/tests/test_qualified_name_provider.py b/libcst/metadata/tests/test_qualified_name_provider.py index 27325e9d..cf7fa68b 100644 --- a/libcst/metadata/tests/test_qualified_name_provider.py +++ b/libcst/metadata/tests/test_qualified_name_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from textwrap import dedent from typing import Collection, Mapping, Optional, Tuple diff --git a/libcst/metadata/tests/test_reentrant_codegen.py b/libcst/metadata/tests/test_reentrant_codegen.py index fa96a1b3..da6b3990 100644 --- a/libcst/metadata/tests/test_reentrant_codegen.py +++ b/libcst/metadata/tests/test_reentrant_codegen.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from textwrap import dedent from typing import Callable diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 34be54ad..f84a415c 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from textwrap import dedent from typing import Mapping, Tuple, cast diff --git a/libcst/metadata/tests/test_span_provider.py b/libcst/metadata/tests/test_span_provider.py index cc2c462e..600b8820 100644 --- a/libcst/metadata/tests/test_span_provider.py +++ b/libcst/metadata/tests/test_span_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict import libcst as cst from libcst.metadata.span_provider import ( diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index 9af17225..f6c97751 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import json from pathlib import Path diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index a59ead8c..fdbb6611 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import json import subprocess from pathlib import Path diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index 23740e2a..208aacfc 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -# pyre-strict import textwrap from contextlib import ExitStack diff --git a/libcst/testing/utils.py b/libcst/testing/utils.py index 7ed8122b..cba70ed1 100644 --- a/libcst/testing/utils.py +++ b/libcst/testing/utils.py @@ -2,6 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +# pyre-unsafe import inspect import re diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index e4de20a2..dc621ff3 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -3,11 +3,11 @@ { "location": { "start": { - "line": 8, + "line": 7, "column": 19 }, "stop": { - "line": 8, + "line": 7, "column": 27 } }, @@ -16,11 +16,11 @@ { "location": { "start": { - "line": 11, + "line": 10, "column": 6 }, "stop": { - "line": 11, + "line": 10, "column": 10 } }, @@ -29,11 +29,11 @@ { "location": { "start": { - "line": 12, + "line": 11, "column": 8 }, "stop": { - "line": 12, + "line": 11, "column": 16 } }, @@ -42,11 +42,11 @@ { "location": { "start": { - "line": 12, + "line": 11, "column": 17 }, "stop": { - "line": 12, + "line": 11, "column": 21 } }, @@ -55,11 +55,11 @@ { "location": { "start": { - "line": 12, + "line": 11, "column": 23 }, "stop": { - "line": 12, + "line": 11, "column": 24 } }, @@ -68,11 +68,11 @@ { "location": { "start": { - "line": 12, + "line": 11, "column": 26 }, "stop": { - "line": 12, + "line": 11, "column": 29 } }, @@ -81,11 +81,11 @@ { "location": { "start": { - "line": 12, + "line": 11, "column": 34 }, "stop": { - "line": 12, + "line": 11, "column": 38 } }, @@ -94,11 +94,11 @@ { "location": { "start": { - "line": 13, + "line": 12, "column": 8 }, "stop": { - "line": 13, + "line": 12, "column": 12 } }, @@ -107,11 +107,11 @@ { "location": { "start": { - "line": 13, + "line": 12, "column": 8 }, "stop": { - "line": 13, + "line": 12, "column": 19 } }, @@ -120,11 +120,11 @@ { "location": { "start": { - "line": 13, + "line": 12, "column": 21 }, "stop": { - "line": 13, + "line": 12, "column": 24 } }, @@ -133,11 +133,11 @@ { "location": { "start": { - "line": 13, + "line": 12, "column": 27 }, "stop": { - "line": 13, + "line": 12, "column": 28 } }, @@ -146,11 +146,11 @@ { "location": { 
"start": { - "line": 16, + "line": 15, "column": 6 }, "stop": { - "line": 16, + "line": 15, "column": 19 } }, @@ -159,11 +159,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 8 }, "stop": { - "line": 17, + "line": 16, "column": 17 } }, @@ -172,11 +172,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 18 }, "stop": { - "line": 17, + "line": 16, "column": 22 } }, @@ -185,11 +185,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 24 }, "stop": { - "line": 17, + "line": 16, "column": 25 } }, @@ -198,11 +198,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 27 }, "stop": { - "line": 17, + "line": 16, "column": 30 } }, @@ -211,11 +211,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 35 }, "stop": { - "line": 17, + "line": 16, "column": 43 } }, @@ -224,11 +224,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 35 }, "stop": { - "line": 17, + "line": 16, "column": 49 } }, @@ -237,11 +237,11 @@ { "location": { "start": { - "line": 17, + "line": 16, "column": 44 }, "stop": { - "line": 17, + "line": 16, "column": 48 } }, @@ -250,11 +250,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 15 }, "stop": { - "line": 18, + "line": 17, "column": 42 } }, @@ -263,11 +263,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 16 }, "stop": { - "line": 18, + "line": 17, "column": 20 } }, @@ -276,11 +276,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 16 }, "stop": { - "line": 18, + "line": 17, "column": 23 } }, @@ -289,11 +289,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 28 }, "stop": { - "line": 18, + "line": 17, "column": 29 } }, @@ -302,11 +302,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 33 }, "stop": { - "line": 18, + "line": 17, "column": 38 } }, @@ -315,11 +315,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 33 }, "stop": { - "line": 18, + "line": 17, "column": 41 } }, @@ -328,11 +328,11 @@ { "location": { "start": { - "line": 18, + "line": 17, "column": 39 }, "stop": { - "line": 18, + "line": 17, "column": 40 } }, @@ -341,11 +341,11 @@ { "location": { "start": { - "line": 21, + "line": 20, "column": 0 }, "stop": { - "line": 21, + "line": 20, "column": 9 } }, @@ -354,11 +354,11 @@ { "location": { "start": { - "line": 21, + "line": 20, "column": 12 }, "stop": { - "line": 21, + "line": 20, "column": 25 } }, @@ -367,11 +367,11 @@ { "location": { "start": { - "line": 21, + "line": 20, "column": 12 }, "stop": { - "line": 21, + "line": 20, "column": 27 } }, @@ -380,11 +380,11 @@ { "location": { "start": { - "line": 22, + "line": 21, "column": 0 }, "stop": { - "line": 22, + "line": 21, "column": 5 } }, @@ -393,11 +393,11 @@ { "location": { "start": { - "line": 22, + "line": 21, "column": 7 }, "stop": { - "line": 22, + "line": 21, "column": 21 } }, @@ -406,11 +406,11 @@ { "location": { "start": { - "line": 22, + "line": 21, "column": 24 }, "stop": { - "line": 22, + "line": 21, "column": 33 } }, @@ -419,11 +419,11 @@ { "location": { "start": { - "line": 22, + "line": 21, "column": 24 }, "stop": { - "line": 22, + "line": 21, "column": 43 } }, @@ -432,11 +432,11 @@ { "location": { "start": { - "line": 22, + "line": 21, "column": 24 }, "stop": { - "line": 22, + "line": 21, "column": 46 } }, @@ -445,11 +445,11 @@ { "location": { "start": { - "line": 22, + "line": 21, "column": 44 }, "stop": { - "line": 22, + "line": 21, "column": 45 } }, @@ -458,11 +458,11 @@ { 
"location": { "start": { - "line": 23, + "line": 22, "column": 4 }, "stop": { - "line": 23, + "line": 22, "column": 8 } }, @@ -471,11 +471,11 @@ { "location": { "start": { - "line": 23, + "line": 22, "column": 12 }, "stop": { - "line": 23, + "line": 22, "column": 17 } }, @@ -484,11 +484,11 @@ { "location": { "start": { - "line": 24, + "line": 23, "column": 4 }, "stop": { - "line": 24, + "line": 23, "column": 8 } }, @@ -497,11 +497,11 @@ { "location": { "start": { - "line": 24, + "line": 23, "column": 4 }, "stop": { - "line": 24, + "line": 23, "column": 15 } }, diff --git a/libcst/tests/pyre/simple_class.py b/libcst/tests/pyre/simple_class.py index 431460e0..2ee2d32f 100644 --- a/libcst/tests/pyre/simple_class.py +++ b/libcst/tests/pyre/simple_class.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict # fmt: off from typing import Sequence diff --git a/libcst/tests/test_batched_visitor.py b/libcst/tests/test_batched_visitor.py index 73d63c2b..ee3351f4 100644 --- a/libcst/tests/test_batched_visitor.py +++ b/libcst/tests/test_batched_visitor.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import cast from unittest.mock import Mock diff --git a/libcst/tests/test_exceptions.py b/libcst/tests/test_exceptions.py index da1bf056..a1c7d842 100644 --- a/libcst/tests/test_exceptions.py +++ b/libcst/tests/test_exceptions.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import pickle from textwrap import dedent diff --git a/libcst/tests/test_fuzz.py b/libcst/tests/test_fuzz.py index 98d6867b..590449c7 100644 --- a/libcst/tests/test_fuzz.py +++ b/libcst/tests/test_fuzz.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict """ Fuzz-tests for libCST, by Zac Hatfield-Dodds (zac@hypothesis.works) diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 4da4836e..4c18643a 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict import json from pathlib import Path @@ -66,7 +65,7 @@ class TypeVerificationVisitor(cst.CSTVisitor): # remove this if condition when the type issues are fixed. if not any( node.deep_equals(name) and tup == _tup - for (name, _tup) in {(cst.Name("i"), (18, 21, 18, 22)),} + for (name, _tup) in {(cst.Name("i"), (17, 21, 17, 22)),} ): self.test.assertIn( tup, diff --git a/libcst/tests/test_tool.py b/libcst/tests/test_tool.py index f52e6c44..b5b4aeeb 100644 --- a/libcst/tests/test_tool.py +++ b/libcst/tests/test_tool.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-# pyre-strict from textwrap import dedent from libcst import parse_module diff --git a/libcst/tests/test_visitor.py b/libcst/tests/test_visitor.py index 34e006de..5bc0510e 100644 --- a/libcst/tests/test_visitor.py +++ b/libcst/tests/test_visitor.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# pyre-strict from typing import List import libcst as cst diff --git a/libcst/tool.py b/libcst/tool.py index 13d8ff67..de626ac3 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -8,7 +8,6 @@ # python -m libcst.tool --help # python -m libcst.tool print python_file.py -# pyre-strict import argparse import dataclasses import distutils.spawn diff --git a/stubs/tokenize.pyi b/stubs/tokenize.pyi index ea203906..a6f50eea 100644 --- a/stubs/tokenize.pyi +++ b/stubs/tokenize.pyi @@ -94,4 +94,4 @@ class TokenInfo(Tuple[int, str, Tuple[int, int], Tuple[int, int], int]): def __repr__(self) -> str: ... def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ... -def tokenize(Callable) -> TokenInfo: ... +def tokenize(Callable[[], bytes]) -> TokenInfo: ... From 5992a7d83d234e7575ad81107d352846ff1abb37 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Tue, 16 Jun 2020 06:58:40 -0700 Subject: [PATCH 025/632] ApplyTypeAnnotationsVisitor: fix default value of keyword only and positional-only args (#314) * ApplyTypeAnnotationsVisitor: fix default values of keyword only args * ApplyTypeAnnotationsVisitor: fix default values of positional-only args Co-authored-by: hauntsaninja <> --- .../visitors/_apply_type_annotations.py | 9 ++- .../tests/test_apply_type_annotations.py | 71 +++++++++++++++++++ 2 files changed, 79 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 9a8db3d7..291d0452 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -398,7 +398,14 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return annotations.parameters.with_changes( params=update_annotation( updated_node.params.params, annotations.parameters.params - ) + ), + kwonly_params=update_annotation( + updated_node.params.kwonly_params, annotations.parameters.kwonly_params + ), + posonly_params=update_annotation( + updated_node.params.posonly_params, + annotations.parameters.posonly_params, + ), ) def _insert_empty_line( diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 08f6aa1b..2e4027b0 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -4,7 +4,9 @@ # LICENSE file in the root directory of this source tree. # +import sys import textwrap +import unittest from typing import Type from libcst import parse_module @@ -31,6 +33,25 @@ class TestApplyAnnotationsVisitor(CodemodTest): return 1 """, ), + ( + """ + def foo( + b: str, c: int = ..., *, d: str = ..., e: int, f: int = ... + ) -> int: ... 
+ """, + """ + def foo( + b, c=5, *, d="a", e, f=10 + ) -> int: + return 1 + """, + """ + def foo( + b: str, c: int=5, *, d: str="a", e: int, f: int=10 + ) -> int: + return 1 + """, + ), ( """ import bar @@ -612,6 +633,56 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) self.assertCodemod(before, after, context_override=context) + @data_provider( + ( + ( + """ + def foo( + a: int, /, b: str, c: int = ..., *, d: str = ..., e: int, f: int = ... + ) -> int: ... + """, + """ + def foo( + a, /, b, c=5, *, d="a", e, f=10 + ) -> int: + return 1 + """, + """ + def foo( + a: int, /, b: str, c: int=5, *, d: str="a", e: int, f: int=10 + ) -> int: + return 1 + """, + ), + ( + """ + def foo( + a: int, b: int = ..., /, c: int = ..., *, d: str = ..., e: int, f: int = ... + ) -> int: ... + """, + """ + def foo( + a, b = 5, /, c = 10, *, d = "a", e, f = 20 + ) -> int: + return 1 + """, + """ + def foo( + a: int, b: int = 5, /, c: int = 10, *, d: str = "a", e: int, f: int = 20 + ) -> int: + return 1 + """, + ), + ) + ) + @unittest.skipIf(sys.version_info < (3, 8), "Unsupported Python version") + def test_annotate_functions_py38(self, stub: str, before: str, after: str) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + self.assertCodemod(before, after, context_override=context) + @data_provider( ( ( From 73608adbdb93162f2af03329f70ddeec33feac06 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 18 Jun 2020 16:31:22 +0100 Subject: [PATCH 026/632] [ExpressionContextProvider] Make subscript values always have a LOAD context (#319) Fixes #318. --- .../tests/test_remove_unused_imports.py | 7 ++++++ .../metadata/expression_context_provider.py | 4 +++- .../tests/test_expression_context_provider.py | 24 +++++++++++++++---- 3 files changed, 30 insertions(+), 5 deletions(-) diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index c11da4c1..d7b369ed 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -76,3 +76,10 @@ class RemoveUnusedImportsCommandTest(CodemodTest): """ self.assertCodemod(before, after) + + def test_access_in_assignment(self) -> None: + before = """ + from a import b + b(0)[x] = False + """ + self.assertCodemod(before, before) diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index c5a1becc..21f3a68b 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -133,7 +133,9 @@ class ExpressionContextVisitor(cst.CSTVisitor): def visit_Subscript(self, node: cst.Subscript) -> bool: self.provider.set_metadata(node, self.context) - node.value.visit(self) + node.value.visit( + ExpressionContextVisitor(self.provider, ExpressionContext.LOAD) + ) slice = node.slice if isinstance(slice, Sequence): for sli in slice: diff --git a/libcst/metadata/tests/test_expression_context_provider.py b/libcst/metadata/tests/test_expression_context_provider.py index 220b3284..439e6e3c 100644 --- a/libcst/metadata/tests/test_expression_context_provider.py +++ b/libcst/metadata/tests/test_expression_context_provider.py @@ -43,6 +43,7 @@ class DependentVisitor(CSTVisitor): self.test.assertEqual( self.get_metadata(ExpressionContextProvider, node, None), self.name_to_context[node.value], + f"Context doesn't match for Name {node.value}", 
) def visit_Attribute(self, node: cst.Attribute) -> None: @@ -148,7 +149,7 @@ class ExpressionContextProviderTest(UnitTest): DependentVisitor( test=self, name_to_context={ - "a": ExpressionContext.STORE, + "a": ExpressionContext.LOAD, "b": ExpressionContext.LOAD, "c": ExpressionContext.LOAD, "d": ExpressionContext.LOAD, @@ -226,7 +227,7 @@ class ExpressionContextProviderTest(UnitTest): DependentVisitor( test=self, name_to_context={ - "a": ExpressionContext.DEL, + "a": ExpressionContext.LOAD, "b": ExpressionContext.LOAD, }, subscript_to_context={"a[b]": ExpressionContext.DEL}, @@ -278,7 +279,7 @@ class ExpressionContextProviderTest(UnitTest): ) ) - def test_list_with_assing(self) -> None: + def test_list_with_assign(self) -> None: wrapper = MetadataWrapper(parse_module("[a] = [b]")) wrapper.visit( DependentVisitor( @@ -294,7 +295,7 @@ class ExpressionContextProviderTest(UnitTest): ) ) - def test_nested_list_with_assing(self) -> None: + def test_nested_list_with_assign(self) -> None: wrapper = MetadataWrapper(parse_module("[[a, b], c] = [[d, e], f]")) wrapper.visit( DependentVisitor( @@ -316,6 +317,21 @@ class ExpressionContextProviderTest(UnitTest): ) ) + def test_expressions_with_assign(self) -> None: + wrapper = MetadataWrapper(parse_module("f(a)[b] = c")) + wrapper.visit( + DependentVisitor( + test=self, + name_to_context={ + "a": ExpressionContext.LOAD, + "b": ExpressionContext.LOAD, + "c": ExpressionContext.LOAD, + "f": ExpressionContext.LOAD, + }, + subscript_to_context={"f(a)[b]": ExpressionContext.STORE}, + ) + ) + def test_invalid_type_for_context(self) -> None: wrapper = MetadataWrapper(parse_module("a()")) wrapper.visit( From 030df0625c4509dd606b9f0e7ea5f9d1b7d35da5 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Thu, 25 Jun 2020 11:13:35 -0400 Subject: [PATCH 027/632] Add codemod RenameCommand (#308) Add codemod RenameCommand which renames all instances of a local or imported object. --- libcst/codemod/commands/rename.py | 359 +++++++++++ libcst/codemod/commands/tests/test_rename.py | 593 +++++++++++++++++++ 2 files changed, 952 insertions(+) create mode 100644 libcst/codemod/commands/rename.py create mode 100644 libcst/codemod/commands/tests/test_rename.py diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py new file mode 100644 index 00000000..ce50cc12 --- /dev/null +++ b/libcst/codemod/commands/rename.py @@ -0,0 +1,359 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict +import argparse +from typing import Callable, Optional, Sequence, Set, Tuple, Union + +import libcst as cst +from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand +from libcst.codemod.visitors import AddImportsVisitor, RemoveImportsVisitor +from libcst.helpers import get_full_name_for_node +from libcst.metadata import QualifiedNameProvider + + +def leave_import_decorator( + method: Callable[..., Union[cst.Import, cst.ImportFrom]] +) -> Callable[..., Union[cst.Import, cst.ImportFrom]]: + # We want to record any 'as name' that is relevant but only after we leave the corresponding Import/ImportFrom node since + # we don't want the 'as name' to interfere with children 'Name' and 'Attribute' nodes. 
+ def wrapper( + self: "RenameCommand", + original_node: Union[cst.Import, cst.ImportFrom], + updated_node: Union[cst.Import, cst.ImportFrom], + ) -> Union[cst.Import, cst.ImportFrom]: + updated_node = method(self, original_node, updated_node) + if original_node != updated_node: + self.record_asname(original_node) + return updated_node + + return wrapper + + +class RenameCommand(VisitorBasedCodemodCommand): + """ + Rename all instances of a local or imported object. + """ + + DESCRIPTION: str = "Rename all instances of a local or imported object." + + METADATA_DEPENDENCIES = (QualifiedNameProvider,) + + @staticmethod + def add_args(parser: argparse.ArgumentParser) -> None: + parser.add_argument( + "--old_name", + dest="old_name", + required=True, + help="Full dotted name of object to rename. Eg: `foo.bar.baz`", + ) + + parser.add_argument( + "--new_name", + dest="new_name", + required=True, + help=( + "Full dotted name of replacement object. You may provide a single-colon-delimited name to specify how you want the new import to be structured." + + "\nEg: `foo:bar.baz` will be translated to `from foo import bar`." + + "\nIf no ':' character is provided, the import statement will default to `from foo.bar import baz` for a `new_name` value of `foo.bar.baz`" + + " or simply replace the old import on the spot if the old import is an exact match." + ), + ) + + def __init__(self, context: CodemodContext, old_name: str, new_name: str) -> None: + super().__init__(context) + + new_module, has_colon, new_mod_or_obj = new_name.rpartition(":") + # Exit early if improperly formatted args. + if ":" in new_module: + raise ValueError("Error: `new_name` should contain at most one colon.") + if ":" in old_name: + raise ValueError("Error: `old_name` should not contain any colons.") + + if not has_colon or not new_module: + new_module, _, new_mod_or_obj = new_name.rpartition(".") + + self.new_name: str = new_name.replace(":", ".").strip(".") + self.new_module: str = new_module.replace(":", ".").strip(".") + self.new_mod_or_obj: str = new_mod_or_obj + + # If `new_name` contains a single colon at the end, then we assume the user wants the import + # to be structured as 'import new_name'. So both self.new_mod_or_obj and self.old_mod_or_obj + # will be empty in this case. + if not self.new_mod_or_obj: + old_module = old_name + old_mod_or_obj = "" + else: + old_module, _, old_mod_or_obj = old_name.rpartition(".") + + self.old_name: str = old_name + self.old_module: str = old_module + self.old_mod_or_obj: str = old_mod_or_obj + + self.as_name: Optional[Tuple[str, str]] = None + + # A set of nodes that have been renamed to help with the cleanup of now potentially unused + # imports, during import cleanup in `leave_Module`. + self.scheduled_removals: Set[cst.CSTNode] = set() + # If an import has been renamed while inside an `Import` or `ImportFrom` node, we want to flag + # this so that we do not end up with two of the same import. + self.bypass_import = False + + def visit_Import(self, node: cst.Import) -> None: + for import_alias in node.names: + alias_name = get_full_name_for_node(import_alias.name) + if alias_name is not None: + if alias_name == self.old_name or alias_name.startswith( + self.old_name + "." + ): + # If the import statement is exactly equivalent to the old name, or we are renaming a top-level module of the import, + # it will be taken care of in `leave_Name` or `leave_Attribute` when visiting the Name and Attribute children of this Import. 
+ self.bypass_import = True + + @leave_import_decorator + def leave_Import( + self, original_node: cst.Import, updated_node: cst.Import + ) -> cst.Import: + new_names = [] + for import_alias in updated_node.names: + import_alias_name = import_alias.name + import_alias_full_name = get_full_name_for_node(import_alias_name) + if import_alias_full_name is None: + raise Exception("Could not parse full name for ImportAlias.name node.") + + if isinstance(import_alias_name, cst.Name) and self.old_name.startswith( + import_alias_full_name + "." + ): + # Might, be in use elsewhere in the code, so schedule a potential removal, and add another alias. + new_names.append(import_alias) + self.scheduled_removals.add(original_node) + new_names.append( + cst.ImportAlias( + name=cst.Name( + value=self.gen_replacement_module(import_alias_full_name) + ) + ) + ) + self.bypass_import = True + elif isinstance( + import_alias_name, cst.Attribute + ) and self.old_name.startswith(import_alias_full_name + "."): + # Same idea as above. + new_names.append(import_alias) + self.scheduled_removals.add(original_node) + new_name_node: Union[ + cst.Attribute, cst.Name + ] = self.gen_name_or_attr_node( + self.gen_replacement_module(import_alias_full_name) + ) + new_names.append(cst.ImportAlias(name=new_name_node)) + self.bypass_import = True + else: + new_names.append(import_alias) + + return updated_node.with_changes(names=new_names) + + def visit_ImportFrom(self, node: cst.ImportFrom) -> None: + module = node.module + if module is None: + return + imported_module_name = get_full_name_for_node(module) + if imported_module_name is None: + return + if imported_module_name == self.old_name or imported_module_name.startswith( + self.old_name + "." + ): + # If the imported module is exactly equivalent to the old name or we are renaming a parent module of the current module, + # it will be taken care of in `leave_Name` or `leave_Attribute` when visiting the children of this ImportFrom. + self.bypass_import = True + + @leave_import_decorator + def leave_ImportFrom( + self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom + ) -> cst.ImportFrom: + module = updated_node.module + if module is None: + return updated_node + imported_module_name = get_full_name_for_node(module) + names = original_node.names + + if imported_module_name is None or not isinstance(names, Sequence): + return updated_node + + else: + new_names = [] + for import_alias in names: + alias_name = get_full_name_for_node(import_alias.name) + if alias_name is not None: + qual_name = f"{imported_module_name}.{alias_name}" + if self.old_name == qual_name: + + replacement_module = self.gen_replacement_module( + imported_module_name + ) + replacement_obj = self.gen_replacement(alias_name) + if not replacement_obj: + # The user has requested an `import` statement rather than an `from ... import`. + # This will be taken care of in `leave_Module`, in the meantime, schedule for potential removal. + new_names.append(import_alias) + self.scheduled_removals.add(original_node) + continue + + new_import_alias_name: Union[ + cst.Attribute, cst.Name + ] = self.gen_name_or_attr_node(replacement_obj) + # Rename on the spot only if this is the only imported name under the module. + if len(names) == 1: + self.bypass_import = True + return updated_node.with_changes( + module=cst.parse_expression(replacement_module), + names=(cst.ImportAlias(name=new_import_alias_name),), + ) + # Or if the module name is to stay the same. 
+ elif replacement_module == imported_module_name: + self.bypass_import = True + new_names.append( + cst.ImportAlias(name=new_import_alias_name) + ) + else: + if self.old_name.startswith(qual_name + "."): + # This import might be in use elsewhere in the code, so schedule a potential removal. + self.scheduled_removals.add(original_node) + new_names.append(import_alias) + + return updated_node.with_changes(names=new_names) + return updated_node + + def leave_Name( + self, original_node: cst.Name, updated_node: cst.Name + ) -> Union[cst.Attribute, cst.Name]: + full_name_for_node: str = original_node.value + full_replacement_name = self.gen_replacement(full_name_for_node) + + # If a node has no associated QualifiedName, we are still inside an import statement. + inside_import_statement: bool = not self.get_metadata( + QualifiedNameProvider, original_node, set() + ) + if QualifiedNameProvider.has_name(self, original_node, self.old_name) or ( + inside_import_statement and full_replacement_name == self.new_name + ): + if not full_replacement_name: + full_replacement_name = self.new_name + if not inside_import_statement: + self.scheduled_removals.add(original_node) + return self.gen_name_or_attr_node(full_replacement_name) + + return updated_node + + def leave_Attribute( + self, original_node: cst.Attribute, updated_node: cst.Attribute + ) -> Union[cst.Name, cst.Attribute]: + full_name_for_node = get_full_name_for_node(original_node) + if full_name_for_node is None: + raise Exception("Could not parse full name for Attribute node.") + full_replacement_name = self.gen_replacement(full_name_for_node) + + # If a node has no associated QualifiedName, we are still inside an import statement. + inside_import_statement: bool = not self.get_metadata( + QualifiedNameProvider, original_node, set() + ) + if QualifiedNameProvider.has_name(self, original_node, self.old_name,) or ( + inside_import_statement and full_replacement_name == self.new_name + ): + new_value, new_attr = self.new_module, self.new_mod_or_obj + if not inside_import_statement: + self.scheduled_removals.add(original_node.value) + if full_replacement_name == self.new_name: + return updated_node.with_changes( + value=cst.parse_expression(new_value), + attr=cst.Name(value=new_attr.rstrip(".")), + ) + + return self.gen_name_or_attr_node(new_attr) + + return updated_node + + def leave_Module( + self, original_node: cst.Module, updated_node: cst.Module + ) -> cst.Module: + for removal_node in self.scheduled_removals: + RemoveImportsVisitor.remove_unused_import_by_node( + self.context, removal_node + ) + # If bypass_import is False, we know that no import statements were directly renamed, and the fact + # that we have any `self.scheduled_removals` tells us we encountered a matching `old_name` in the code. 
+ if not self.bypass_import and self.scheduled_removals: + if self.new_module: + new_obj: Optional[str] = self.new_mod_or_obj.split(".")[ + 0 + ] if self.new_mod_or_obj else None + AddImportsVisitor.add_needed_import( + self.context, module=self.new_module, obj=new_obj + ) + return updated_node + + def gen_replacement(self, original_name: str) -> str: + module_as_name = self.as_name + if module_as_name is not None: + if original_name == module_as_name[0]: + original_name = module_as_name[1] + elif original_name.startswith(module_as_name[0] + "."): + original_name = original_name.replace( + module_as_name[0] + ".", module_as_name[1] + ".", 1 + ) + + if original_name == self.old_mod_or_obj: + return self.new_mod_or_obj + elif original_name == ".".join([self.old_module, self.old_mod_or_obj]): + return self.new_name + elif original_name.endswith("." + self.old_mod_or_obj): + return self.new_mod_or_obj + else: + return self.gen_replacement_module(original_name) + + def gen_replacement_module(self, original_module: str) -> str: + return self.new_module if original_module == self.old_module else "" + + def gen_name_or_attr_node( + self, dotted_expression: str + ) -> Union[cst.Attribute, cst.Name]: + name_or_attr_node: cst.BaseExpression = cst.parse_expression(dotted_expression) + if not isinstance(name_or_attr_node, (cst.Name, cst.Attribute)): + raise Exception( + "`parse_expression()` on dotted path returned non-Attribute-or-Name." + ) + return name_or_attr_node + + def record_asname(self, original_node: Union[cst.Import, cst.ImportFrom]) -> None: + # Record the import's `as` name if it has one, and set the attribute mapping. + names = original_node.names + if not isinstance(names, Sequence): + return + for import_alias in names: + alias_name = get_full_name_for_node(import_alias.name) + if isinstance(original_node, cst.ImportFrom): + module = original_node.module + if module is None: + return + module_name = get_full_name_for_node(module) + if module_name is None: + return + qual_name = f"{module_name}.{alias_name}" + else: + qual_name = alias_name + if qual_name is not None and alias_name is not None: + if qual_name == self.old_name or self.old_name.startswith( + qual_name + "." + ): + as_name_optional = import_alias.asname + as_name_node = ( + as_name_optional.name if as_name_optional is not None else None + ) + if as_name_node is not None and isinstance( + as_name_node, (cst.Name, cst.Attribute) + ): + full_as_name = get_full_name_for_node(as_name_node) + if full_as_name is not None: + self.as_name = (full_as_name, alias_name) diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py new file mode 100644 index 00000000..b5427280 --- /dev/null +++ b/libcst/codemod/commands/tests/test_rename.py @@ -0,0 +1,593 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# +# pyre-strict + +from libcst.codemod import CodemodTest +from libcst.codemod.commands.rename import RenameCommand + + +class TestRenameCommand(CodemodTest): + + TRANSFORM = RenameCommand + + def test_rename_name(self) -> None: + + before = """ + from foo import bar + + def test() -> None: + bar(5) + """ + after = """ + from baz import qux + + def test() -> None: + qux(5) + """ + + self.assertCodemod(before, after, old_name="foo.bar", new_name="baz.qux") + + def test_rename_name_asname(self) -> None: + + before = """ + from foo import bar as bla + + def test() -> None: + bla(5) + """ + after = """ + from baz import qux + + def test() -> None: + qux(5) + """ + + self.assertCodemod( + before, after, old_name="foo.bar", new_name="baz.qux", + ) + + def test_rename_repeated_name_with_asname(self) -> None: + before = """ + from foo import foo as bla + + def test() -> None: + bla.bla(5) + """ + after = """ + from baz import qux + + def test() -> None: + qux.bla(5) + """ + self.assertCodemod( + before, after, old_name="foo.foo", new_name="baz.qux", + ) + + def test_rename_attr(self) -> None: + + before = """ + import a.b + + def test() -> None: + a.b.c(5) + """ + after = """ + import d.e + + def test() -> None: + d.e.f(5) + """ + + self.assertCodemod( + before, after, old_name="a.b.c", new_name="d.e.f", + ) + + def test_rename_attr_asname(self) -> None: + + before = """ + import foo as bar + + def test() -> None: + bar.qux(5) + """ + after = """ + import baz + + def test() -> None: + baz.quux(5) + """ + + self.assertCodemod( + before, after, old_name="foo.qux", new_name="baz.quux", + ) + + def test_rename_module_import(self) -> None: + before = """ + import a.b + + class Foo(a.b.C): + pass + """ + after = """ + import c.b + + class Foo(c.b.C): + pass + """ + + self.assertCodemod( + before, after, old_name="a.b", new_name="c.b", + ) + + def test_rename_module_import_2(self) -> None: + before = """ + import a.b + + class Foo(a.b.C): + pass + """ + after = """ + import c.b + + class Foo(c.b.C): + pass + """ + + self.assertCodemod( + before, after, old_name="a", new_name="c", + ) + + def test_rename_module_import_no_change(self) -> None: + # Full qualified names don't match, so don't codemod + before = """ + import a.b + + class Foo(a.b.C): + pass + """ + self.assertCodemod( + before, before, old_name="b", new_name="c.b", + ) + + def test_rename_module_import_from(self) -> None: + before = """ + from a import b + + class Foo(b.C): + pass + """ + after = """ + from c import b + + class Foo(b.C): + pass + """ + + self.assertCodemod( + before, after, old_name="a.b", new_name="c.b", + ) + + def test_rename_module_import_from_2(self) -> None: + before = """ + from a import b + + class Foo(b.C): + pass + """ + after = """ + from c import b + + class Foo(b.C): + pass + """ + + self.assertCodemod( + before, after, old_name="a", new_name="c", + ) + + def test_rename_class(self) -> None: + before = """ + from a.b import some_class + + class Foo(some_class): + pass + """ + after = """ + from c.b import some_class + + class Foo(some_class): + pass + """ + self.assertCodemod( + before, after, old_name="a.b.some_class", new_name="c.b.some_class", + ) + + def test_rename_importfrom_same_module(self) -> None: + before = """ + from a.b import Class_1, Class_2 + + class Foo(Class_1): + pass + """ + after = """ + from a.b import Class_3, Class_2 + + class Foo(Class_3): + pass + """ + self.assertCodemod( + before, after, old_name="a.b.Class_1", new_name="a.b.Class_3", + ) + + def 
test_rename_importfrom_same_module_2(self) -> None: + before = """ + from a.b import module_1, module_2 + + class Foo(module_1.Class_1): + pass + class Fooo(module_2.Class_2): + pass + """ + after = """ + from a.b import module_2 + from a.b.module_3 import Class_3 + + class Foo(Class_3): + pass + class Fooo(module_2.Class_2): + pass + """ + self.assertCodemod( + before, + after, + old_name="a.b.module_1.Class_1", + new_name="a.b.module_3.Class_3", + ) + + def test_rename_local_variable(self) -> None: + before = """ + x = 5 + y = 5 + x + """ + after = """ + z = 5 + y = 5 + z + """ + + self.assertCodemod( + before, after, old_name="x", new_name="z", + ) + + def test_module_does_not_change(self) -> None: + before = """ + from a import b + + class Foo(b): + pass + """ + after = """ + from a import c + + class Foo(c): + pass + """ + self.assertCodemod(before, after, old_name="a.b", new_name="a.c") + + def test_other_imports_untouched(self) -> None: + before = """ + import a, b, c + + class Foo(a.z): + bar: b.bar + baz: c.baz + """ + after = """ + import d, b, c + + class Foo(d.z): + bar: b.bar + baz: c.baz + """ + self.assertCodemod( + before, after, old_name="a.z", new_name="d.z", + ) + + def test_other_import_froms_untouched(self) -> None: + before = """ + from a import b, c, d + + class Foo(b): + bar: c.bar + baz: d.baz + """ + after = """ + from a import c, d + from f import b + + class Foo(b): + bar: c.bar + baz: d.baz + """ + self.assertCodemod( + before, after, old_name="a.b", new_name="f.b", + ) + + def test_no_removal_of_import_in_use(self) -> None: + before = """ + import a + + class Foo(a.b): + pass + class Foo2(a.c): + pass + """ + after = """ + import a, z + + class Foo(z.b): + pass + class Foo2(a.c): + pass + """ + self.assertCodemod( + before, after, old_name="a.b", new_name="z.b", + ) + + def test_no_removal_of_dotted_import_in_use(self) -> None: + before = """ + import a.b + + class Foo(a.b.c): + pass + class Foo2(a.b.d): + pass + """ + after = """ + import a.b, z.b + + class Foo(z.b.c): + pass + class Foo2(a.b.d): + pass + """ + self.assertCodemod( + before, after, old_name="a.b.c", new_name="z.b.c", + ) + + def test_no_removal_of_import_from_in_use(self) -> None: + before = """ + from a import b + + class Foo(b.some_class): + bar: b.some_other_class + """ + after = """ + from a import b + from blah import some_class + + class Foo(some_class): + bar: b.some_other_class + """ + self.assertCodemod( + before, after, old_name="a.b.some_class", new_name="blah.some_class", + ) + + def test_other_unused_imports_untouched(self) -> None: + before = """ + import a + import b + + class Foo(a.obj): + pass + """ + after = """ + import c + import b + + class Foo(c.obj): + pass + """ + self.assertCodemod( + before, after, old_name="a.obj", new_name="c.obj", + ) + + def test_complex_module_rename(self) -> None: + before = """ + from a.b.c import d + + class Foo(d.e.f): + pass + """ + after = """ + from g.h.i import j + + class Foo(j): + pass + """ + self.assertCodemod(before, after, old_name="a.b.c.d.e.f", new_name="g.h.i.j") + + def test_complex_module_rename_with_asname(self) -> None: + before = """ + from a.b.c import d as ddd + + class Foo(ddd.e.f): + pass + """ + after = """ + from g.h.i import j + + class Foo(j): + pass + """ + self.assertCodemod(before, after, old_name="a.b.c.d.e.f", new_name="g.h.i.j") + + def test_names_with_repeated_substrings(self) -> None: + before = """ + from aa import aaaa + + class Foo(aaaa.Bar): + pass + """ + after = """ + from b import c + + class 
Foo(c.Bar): + pass + """ + self.assertCodemod( + before, after, old_name="aa.aaaa", new_name="b.c", + ) + + def test_repeated_name(self) -> None: + before = """ + from foo import foo + + def bar(): + foo(5) + """ + after = """ + from qux import qux + + def bar(): + qux(5) + """ + self.assertCodemod( + before, after, old_name="foo.foo", new_name="qux.qux", + ) + + def test_no_codemod(self) -> None: + before = """ + from foo import bar + + def baz(): + bar(5) + """ + self.assertCodemod( + before, before, old_name="bar", new_name="qux", + ) + + def test_rename_import_prefix(self) -> None: + before = """ + import a.b.c.d + """ + after = """ + import x.y.c.d + """ + self.assertCodemod( + before, after, old_name="a.b", new_name="x.y", + ) + + def test_rename_import_from_prefix(self) -> None: + before = """ + from a.b.c.d import foo + """ + after = """ + from x.y.c.d import foo + """ + self.assertCodemod( + before, after, old_name="a.b", new_name="x.y", + ) + + def test_rename_multiple_occurrences(self) -> None: + before = """ + from a import b + + class Foo(b.some_class): + pass + class Foobar(b.some_class): + pass + """ + after = """ + from c.d import some_class + + class Foo(some_class): + pass + class Foobar(some_class): + pass + """ + self.assertCodemod( + before, after, old_name="a.b.some_class", new_name="c.d.some_class" + ) + + def test_rename_multiple_imports(self) -> None: + before = """ + import a + from a import b + from a.c import d + + class Foo(d): + pass + class Fooo(b.some_class): + pass + class Foooo(a.some_class): + pass + """ + after = """ + import z + from z import b + from z.c import d + + class Foo(d): + pass + class Fooo(b.some_class): + pass + class Foooo(z.some_class): + pass + """ + self.assertCodemod(before, after, old_name="a", new_name="z") + + def test_input_with_colon_sep(self) -> None: + before = """ + from a.b.c import d + + class Foo(d.e.f): + pass + """ + after = """ + from g.h import i + + class Foo(i.j): + pass + """ + self.assertCodemod(before, after, old_name="a.b.c.d.e.f", new_name="g.h:i.j") + + def test_input_with_colon_sep_at_the_end(self) -> None: + before = """ + from a.b.c import d + + class Foo(d.e): + pass + """ + after = """ + import g.h.i.j + + class Foo(g.h.i.j.e): + pass + """ + self.assertCodemod(before, after, old_name="a.b.c.d", new_name="g.h.i.j:") + + def test_input_with_colon_sep_at_the_front(self) -> None: + # This case should treat it as if no colon separator. + before = """ + from a.b.c import d + + class Foo(d.e): + pass + """ + after = """ + from g.h.i import j + + class Foo(j.e): + pass + """ + self.assertCodemod(before, after, old_name="a.b.c.d", new_name=":g.h.i.j") + + def test_no_change_because_no_match_was_found(self) -> None: + before = """ + from foo import bar + bar(42) + """ + self.assertCodemod(before, before, old_name="baz.bar", new_name="qux.bar") From 4a928b0e9448e053ae3b2138d80bf25476e2fd03 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Thu, 25 Jun 2020 11:47:45 -0400 Subject: [PATCH 028/632] Bump to version 0.3.7 (#321) --- CHANGELOG.md | 16 ++++++++++++++++ libcst/_version.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c4442fb9..a37530d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,19 @@ +# 0.3.7 - 2020-06-24 + +## Added + - Added `RenameCommand` to rename all instances of a local or imported object to a specified new name. [#308](https://github.com/Instagram/LibCST/pull/308) + +## Updated + - Upgraded Codecov dev dependency to 2.1.4. 
[#311](https://github.com/Instagram/LibCST/pull/311) + - Enabled Pyre `strict` mode by default. [#313](https://github.com/Instagram/LibCST/pull/313) + +## Fixed + - Fixed `ImportError` under Python 3.9. [#306](https://github.com/Instagram/LibCST/pull/306) + - Fixed `stdout` being plugged into successfully codemod-ed files. [#309](https://github.com/Instagram/LibCST/pull/309) + - Fixed `QualifiedName` retrieval for names with repeated substrings. [#312](https://github.com/Instagram/LibCST/pull/312) + - Fixed default values of keyword-only and positional-only arguments in `ApplyTypeAnnotationsVisitor`. [#314](https://github.com/Instagram/LibCST/pull/314) + - Fixed `ExpressionContextProvider` by giving subscript values a `LOAD`context. [#319](https://github.com/Instagram/LibCST/pull/319) + # 0.3.6 - 2020-05-27 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index 7e23db77..f04abd90 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.6" +LIBCST_VERSION: str = "0.3.7" From 2d56ba7a3e302e23c357d9565b1bbb307e5d8c50 Mon Sep 17 00:00:00 2001 From: lrjball <50599110+lrjball@users.noreply.github.com> Date: Mon, 29 Jun 2020 01:15:19 +0100 Subject: [PATCH 029/632] Minor typo in scope_provider.py (#324) Fixed minor typo ('as' was repeated) --- libcst/metadata/scope_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index e1ef8605..ccf2d00d 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -46,7 +46,7 @@ class Access: This scope analysis only analyzes access via a :class:`~libcst.Name` or a :class:`~libcst.Name` node embedded in other node like :class:`~libcst.Call` or :class:`~libcst.Attribute`. It doesn't support type annontation using :class:`~libcst.SimpleString` literal for forward - references. E.g. in this example, the ``"Tree"`` isn't parsed as as an access:: + references. E.g. in this example, the ``"Tree"`` isn't parsed as an access:: class Tree: def __new__(cls) -> "Tree": From 3a7ffafe4534f74399fb774866e3e133db13239a Mon Sep 17 00:00:00 2001 From: jimmylai Date: Mon, 29 Jun 2020 07:54:32 -0700 Subject: [PATCH 030/632] Fix NotEqual position issue (#325) Co-authored-by: Jimmy Lai --- libcst/_nodes/op.py | 10 ++++---- .../metadata/tests/test_position_provider.py | 24 +++++++++++++++++-- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/libcst/_nodes/op.py b/libcst/_nodes/op.py index 8e927c18..54fdd73c 100644 --- a/libcst/_nodes/op.py +++ b/libcst/_nodes/op.py @@ -669,7 +669,7 @@ class GreaterThanEqual(BaseCompOp, _BaseOneTokenOp): @add_slots @dataclass(frozen=True) -class NotEqual(BaseCompOp): +class NotEqual(BaseCompOp, _BaseOneTokenOp): """ A comparison operator that can be used in a :class:`Comparison` expression. 
@@ -691,7 +691,7 @@ class NotEqual(BaseCompOp): if self.value not in ["!=", "<>"]: raise CSTValidationError("Invalid value for NotEqual node.") - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "BaseCompOp": + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "NotEqual": return self.__class__( whitespace_before=visit_required( self, "whitespace_before", self.whitespace_before, visitor @@ -702,10 +702,8 @@ class NotEqual(BaseCompOp): ), ) - def _codegen_impl(self, state: CodegenState) -> None: - self.whitespace_before._codegen(state) - state.add_token(self.value) - self.whitespace_after._codegen(state) + def _get_token(self) -> str: + return self.value @add_slots diff --git a/libcst/metadata/tests/test_position_provider.py b/libcst/metadata/tests/test_position_provider.py index 07374575..03132428 100644 --- a/libcst/metadata/tests/test_position_provider.py +++ b/libcst/metadata/tests/test_position_provider.py @@ -9,7 +9,7 @@ from typing import Tuple import libcst as cst from libcst import parse_module from libcst._batched_visitor import BatchableCSTVisitor -from libcst._visitors import CSTTransformer +from libcst._visitors import CSTVisitor from libcst.metadata import ( CodeRange, MetadataWrapper, @@ -38,7 +38,7 @@ class PositionProviderTest(UnitTest): """ test = self - class DependentVisitor(CSTTransformer): + class DependentVisitor(CSTVisitor): METADATA_DEPENDENCIES = (PositionProvider,) def visit_Pass(self, node: cst.Pass) -> None: @@ -49,6 +49,26 @@ class PositionProviderTest(UnitTest): wrapper = MetadataWrapper(parse_module("pass")) wrapper.visit(DependentVisitor()) + def test_equal_range(self) -> None: + test = self + expected_range = CodeRange((1, 4), (1, 6)) + + class EqualPositionVisitor(CSTVisitor): + METADATA_DEPENDENCIES = (PositionProvider,) + + def visit_Equal(self, node: cst.Equal) -> None: + test.assertEqual( + self.get_metadata(PositionProvider, node), expected_range + ) + + def visit_NotEqual(self, node: cst.NotEqual) -> None: + test.assertEqual( + self.get_metadata(PositionProvider, node), expected_range + ) + + MetadataWrapper(parse_module("var == 1")).visit(EqualPositionVisitor()) + MetadataWrapper(parse_module("var != 1")).visit(EqualPositionVisitor()) + def test_batchable_provider(self) -> None: test = self From 6a5f71c6b4eebe1f798ecb5c168a5a372fce87a0 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Tue, 30 Jun 2020 15:40:46 -0400 Subject: [PATCH 031/632] Optional args for tox commands (#327) --- tox.ini | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tox.ini b/tox.ini index 43da6fb0..e3f31025 100644 --- a/tox.ini +++ b/tox.ini @@ -6,32 +6,32 @@ deps = -rrequirements.txt -rrequirements-dev.txt commands = - python -m unittest + python -m unittest {posargs} [testenv:lint] deps = -rrequirements.txt -rrequirements-dev.txt commands = - flake8 - isort --check-only - black --check libcst/ + flake8 {posargs} + isort --check-only -rc {posargs:.} + black --check {posargs:libcst/} [testenv:docs] -deps = +deps = -rrequirements.txt -rrequirements-dev.txt commands = - sphinx-build docs/source/ docs/build/ + sphinx-build {posargs:docs/source/ docs/build/} [testenv:autofix] deps = -rrequirements.txt -rrequirements-dev.txt commands = - flake8 - isort -y -q - black libcst/ + flake8 {posargs} + isort -y -q -rc {posargs:.} + black {posargs:libcst/} [testenv:coverage] deps = From 8523852d05d0e7c231f1810211741d75a6147547 Mon Sep 17 00:00:00 2001 From: Josie Eshkenazi Date: Mon, 6 Jul 2020 11:15:50 -0400 Subject: [PATCH 
032/632] FullRepoManager `cache` property (#330) Add public cache property to FullRepoManager --- libcst/metadata/full_repo_manager.py | 10 ++++++++++ libcst/metadata/tests/test_full_repo_manager.py | 12 ++++++++++++ 2 files changed, 22 insertions(+) diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index e1bf10a1..4d41140a 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -40,6 +40,16 @@ class FullRepoManager: self._providers = providers self._paths: List[str] = list(paths) + @property + def cache(self) -> Dict["ProviderT", Mapping[str, object]]: + """ + The full repository cache data for all metadata providers passed in the ``providers`` parameter when + constructing :class:`~libcst.metadata.FullRepoManager`. Each provider is mapped to a mapping of path to cache. + """ + # Make sure that the cache is available to us. If resolve_cache() was called manually then this is a noop. + self.resolve_cache() + return self._cache + def resolve_cache(self) -> None: """ Resolve cache for all providers that require it. Normally this is called by diff --git a/libcst/metadata/tests/test_full_repo_manager.py b/libcst/metadata/tests/test_full_repo_manager.py index 9e57c778..ff7ebfd0 100644 --- a/libcst/metadata/tests/test_full_repo_manager.py +++ b/libcst/metadata/tests/test_full_repo_manager.py @@ -48,3 +48,15 @@ class FullRepoManagerTest(UnitTest): "The path needs to be in paths parameter when constructing FullRepoManager for efficient batch processing.", ): manager.get_metadata_wrapper_for_path(path) + + @patch.object(TypeInferenceProvider, "gen_cache") + def test_get_full_repo_cache(self, gen_cache: Mock) -> None: + path_prefix = "tests/pyre/simple_class" + path = f"{path_prefix}.py" + mock_cache = { + path: json.loads((Path(REPO_ROOT_DIR) / f"{path_prefix}.json").read_text()) + } + gen_cache.return_value = mock_cache + manager = FullRepoManager(REPO_ROOT_DIR, path, [TypeInferenceProvider]) + cache = manager.cache + self.assertEqual(cache, {TypeInferenceProvider: mock_cache}) From 9d3bb11eb8bd0fedd73650d7633c9ef5e9ef7e56 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 9 Jul 2020 16:37:56 +0100 Subject: [PATCH 033/632] Only remove trailing comma if the last alias is removed (#334) --- .../tests/test_remove_unused_imports.py | 7 +++++++ libcst/codemod/visitors/_remove_imports.py | 18 ++++++++++++++---- .../visitors/tests/test_remove_imports.py | 14 ++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index d7b369ed..2c817560 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -83,3 +83,10 @@ class RemoveUnusedImportsCommandTest(CodemodTest): b(0)[x] = False """ self.assertCodemod(before, before) + + def test_no_formatting_if_no_unused_imports(self) -> None: + before = """ + from m import (a, b,) + a(b, 'look at these ugly quotes') + """ + self.assertCodemod(before, before) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 8c3dc9f8..318372d8 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -343,17 +343,22 @@ class RemoveImportsVisitor(ContextAwareTransformer): names_to_keep.append(import_alias) continue + # no changes + if names_to_keep == original_node.names: + 
return updated_node + # Now, either remove this statement or remove the imports we are # deleting from this statement. if len(names_to_keep) == 0: return cst.RemoveFromParent() - else: + + if names_to_keep[-1] != original_node.names[-1]: # Remove trailing comma in order to not mess up import statements. names_to_keep = [ *names_to_keep[:-1], names_to_keep[-1].with_changes(comma=cst.MaybeSentinel.DEFAULT), ] - return updated_node.with_changes(names=names_to_keep) + return updated_node.with_changes(names=names_to_keep) def leave_ImportFrom( self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom @@ -399,14 +404,19 @@ class RemoveImportsVisitor(ContextAwareTransformer): names_to_keep.append(import_alias) continue + # no changes + if names_to_keep == names: + return updated_node + # Now, either remove this statement or remove the imports we are # deleting from this statement. if len(names_to_keep) == 0: return cst.RemoveFromParent() - else: + + if names_to_keep[-1] != names[-1]: # Remove trailing comma in order to not mess up import statements. names_to_keep = [ *names_to_keep[:-1], names_to_keep[-1].with_changes(comma=cst.MaybeSentinel.DEFAULT), ] - return updated_node.with_changes(names=names_to_keep) + return updated_node.with_changes(names=names_to_keep) diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index 21932b65..d8d0e186 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -797,3 +797,17 @@ class TestRemoveImportsCodemod(CodemodTest): after, RemoveImportTransformer(CodemodContext()).transform_module(module).code, ) + + def test_remove_comma(self) -> None: + """ + Trailing commas should be removed if and only if the last alias is removed. + """ + before = """ + from m import (a, b,) + import x, y + """ + after = """ + from m import (b,) + import x + """ + self.assertCodemod(before, after, [("m", "a", None), ("y", None, None)]) From 7219efcd3f3b83ef90847a5675af78bcbb374f7b Mon Sep 17 00:00:00 2001 From: Maggie Moss Date: Mon, 13 Jul 2020 17:31:48 -0700 Subject: [PATCH 034/632] [Pyre] Handle type subscripts when applying annotations. (#335) * [Pyre] Handle type subscripts when applying annotations. * Fix lint. 
* Use matchers syntax instead of isinstance --- .../visitors/_apply_type_annotations.py | 5 +++- .../tests/test_apply_type_annotations.py | 27 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 291d0452..ba3cd700 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -8,6 +8,7 @@ from dataclasses import dataclass, field from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst +from libcst import matchers as m from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor @@ -133,6 +134,8 @@ class TypeCollector(cst.CSTVisitor): def _handle_Subscript(self, node: cst.Subscript) -> cst.Subscript: slice = node.slice + if m.matches(node.value, m.Name(value="Type")): + return node if isinstance(slice, list): new_slice = [] for item in slice: @@ -163,7 +166,7 @@ class TypeCollector(cst.CSTVisitor): return cst.Annotation(annotation=attr) if isinstance(annotation, cst.Subscript): value = annotation.value - if isinstance(value, cst.Name) and value.value == "Type": + if m.matches(value, m.Name(value="Type")): return returns return cst.Annotation(annotation=self._handle_Subscript(annotation)) else: diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 2e4027b0..90e7b58c 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -624,6 +624,33 @@ class TestApplyAnnotationsVisitor(CodemodTest): return [] """, ), + ( + """ + from typing import Dict + + example: Dict[str, Type[foo.Example]] = ... 
+ """, + """ + from typing import Type + + def foo() -> Type[foo.Example]: + class Example: + pass + return Example + + example = { "test": foo() } + """, + """ + from typing import Dict, Type + + def foo() -> Type[foo.Example]: + class Example: + pass + return Example + + example: Dict[str, Type[foo.Example]] = { "test": foo() } + """, + ), ) ) def test_annotate_functions(self, stub: str, before: str, after: str) -> None: From 7208012c98527c0c8526781ef7b5951f2880badf Mon Sep 17 00:00:00 2001 From: jimmylai Date: Mon, 13 Jul 2020 19:16:09 -0700 Subject: [PATCH 035/632] insert imports after module docstring (#343) Co-authored-by: Jimmy Lai --- libcst/codemod/visitors/_add_imports.py | 28 ++++++++++++++----- .../visitors/tests/test_add_imports.py | 21 ++++++++++++++ 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index aa487dd8..ccafc9c2 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -248,7 +248,9 @@ class AddImportsVisitor(ContextAwareTransformer): ) -> Tuple[ List[Union[libcst.SimpleStatementLine, libcst.BaseCompoundStatement]], List[Union[libcst.SimpleStatementLine, libcst.BaseCompoundStatement]], + List[Union[libcst.SimpleStatementLine, libcst.BaseCompoundStatement]], ]: + statement_before_import_location = 0 import_add_location = 0 # never insert an import before initial __strict__ flag @@ -267,14 +269,18 @@ class AddImportsVisitor(ContextAwareTransformer): ] ), ): - import_add_location = 1 + statement_before_import_location = import_add_location = 1 # This works under the principle that while we might modify node contents, # we have yet to modify the number of statements. So we can match on the # original tree but break up the statements of the modified tree. If we # change this assumption in this visitor, we will have to change this code. for i, statement in enumerate(orig_module.body): - if isinstance(statement, libcst.SimpleStatementLine): + if m.matches( + statement, m.SimpleStatementLine(body=[m.Expr(value=m.SimpleString())]) + ): + statement_before_import_location = import_add_location = 1 + elif isinstance(statement, libcst.SimpleStatementLine): for possible_import in statement.body: for last_import in self.all_imports: if possible_import is last_import: @@ -282,7 +288,12 @@ class AddImportsVisitor(ContextAwareTransformer): break return ( - list(updated_module.body[:import_add_location]), + list(updated_module.body[:statement_before_import_location]), + list( + updated_module.body[ + statement_before_import_location:import_add_location + ] + ), list(updated_module.body[import_add_location:]), ) @@ -325,9 +336,11 @@ class AddImportsVisitor(ContextAwareTransformer): return updated_node # First, find the insertion point for imports - statements_before_imports, statements_after_imports = self._split_module( - original_node, updated_node - ) + ( + statements_before_imports, + statements_until_add_imports, + statements_after_imports, + ) = self._split_module(original_node, updated_node) # Make sure there's at least one empty line before the first non-import statements_after_imports = self._insert_empty_line(statements_after_imports) @@ -348,6 +361,7 @@ class AddImportsVisitor(ContextAwareTransformer): # Now, add all of the imports we need! 
return updated_node.with_changes( body=( + *statements_before_imports, *[ parse_statement( f"from {module} import " @@ -362,7 +376,7 @@ class AddImportsVisitor(ContextAwareTransformer): for module, aliases in module_and_alias_mapping.items() if module == "__future__" ], - *statements_before_imports, + *statements_until_add_imports, *[ parse_statement( f"import {module}", config=updated_node.config_for_parsing diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index cfc4b26b..57060269 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -637,3 +637,24 @@ class TestAddImportsCodemod(CodemodTest): [("a", "f", None), ("a", "g", "y"), ("a", "c", None), ("a", "d", "x")], context_override=CodemodContext(full_module_name="a.b.foobar"), ) + + def test_import_in_docstring_module(self) -> None: + """ + The import should be added after module docstring. + """ + before = """ + '''Docstring.''' + import typing + """ + after = """ + '''Docstring.''' + from __future__ import annotations + import typing + """ + + self.assertCodemod( + before, + after, + [("__future__", "annotations", None)], + context_override=CodemodContext(full_module_name="a.b.foobar"), + ) From f36eacb1327830fad51e08084b46601cad681ae0 Mon Sep 17 00:00:00 2001 From: Sebastian Kreft Date: Mon, 13 Jul 2020 22:32:33 -0400 Subject: [PATCH 036/632] fix: improve validation for ImportAlias and Try statements (#340) * fix: improve validation for ImportAlias and Try statements For `Try` statements we ensure that the bare except, if present, is at the last position. For ImportAlias we ensure that the imported name is valid. Fixes #287 * Apply suggestions from code review Add missing periods. * Apply suggestions from code review Add missing periods. * Update libcst/_nodes/tests/test_import.py Co-authored-by: jimmylai --- libcst/_nodes/statement.py | 13 ++++++++++++- libcst/_nodes/tests/test_import.py | 12 ++++++++++++ libcst/_nodes/tests/test_try.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 53 insertions(+), 1 deletion(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 08db5f8f..7f99b868 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -904,8 +904,11 @@ class Try(BaseCompoundStatement): if len(self.handlers) == 0 and self.orelse is not None: raise CSTValidationError( "A Try statement must have at least one ExceptHandler in order " - + "to have an Else" + + "to have an Else." ) + # Check bare excepts are always at the last position + if any(handler.type is None for handler in self.handlers[:-1]): + raise CSTValidationError("The bare except: handler must be the last one.") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Try": return Try( @@ -972,6 +975,14 @@ class ImportAlias(CSTNode): raise CSTValidationError( "Must use a Name node for AsName name inside ImportAlias." ) + try: + self.evaluated_name + except Exception as e: + if str(e) == "Logic error!": + raise CSTValidationError( + "The imported name must be a valid qualified name." 
+ ) + raise e def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ImportAlias": return ImportAlias( diff --git a/libcst/_nodes/tests/test_import.py b/libcst/_nodes/tests/test_import.py index cd56da51..f911029c 100644 --- a/libcst/_nodes/tests/test_import.py +++ b/libcst/_nodes/tests/test_import.py @@ -195,6 +195,18 @@ class ImportCreateTest(CSTNodeTest): ), "expected_re": "at least one space", }, + { + "get_node": lambda: cst.Import( + names=[ + cst.ImportAlias( + name=cst.Attribute( + value=cst.Float(value="0."), attr=cst.Name(value="A") + ) + ) + ] + ), + "expected_re": "imported name must be a valid qualified name.", + }, ) ) def test_invalid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index b9492a7a..df4a6ae4 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -374,6 +374,35 @@ class TryTest(CSTNodeTest): ), "expected_re": "at least one ExceptHandler in order to have an Else", }, + { + "get_node": lambda: cst.Try( + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + handlers=( + cst.ExceptHandler( + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + ), + cst.ExceptHandler( + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + ), + ), + ), + "expected_re": "The bare except: handler must be the last one.", + }, + { + "get_node": lambda: cst.Try( + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + handlers=( + cst.ExceptHandler( + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + ), + cst.ExceptHandler( + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + type=cst.Name("Exception"), + ), + ), + ), + "expected_re": "The bare except: handler must be the last one.", + }, ) ) def test_invalid(self, **kwargs: Any) -> None: From a9177e27bceda0a53a232d01e9643600a6bde717 Mon Sep 17 00:00:00 2001 From: Sebastian Kreft Date: Wed, 15 Jul 2020 15:59:38 -0400 Subject: [PATCH 037/632] fix: allow ParenthesizedWhitespace before params in FuncDef (#342) * fix: allow ParenthesizedWhitespace before params in FuncDef Fixes #303 * fix: run codegen --- libcst/_nodes/statement.py | 2 +- libcst/_nodes/tests/test_funcdef.py | 16 ++++++++++++++++ libcst/matchers/__init__.py | 6 +++--- 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 7f99b868..aecbc43c 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -1572,7 +1572,7 @@ class FunctionDef(BaseCompoundStatement): #: Whitespace after the opening parenthesis for the parameters but before #: the first param itself. - whitespace_before_params: SimpleWhitespace = SimpleWhitespace.field("") + whitespace_before_params: BaseParenthesizableWhitespace = SimpleWhitespace.field("") #: Whitespace after the closing parenthesis or return annotation and before #: the colon. 
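As a minimal sketch (not part of this patch), assuming a LibCST build that includes this fix, a `def` whose parameters start on the following line should now round-trip through parse and codegen, since the parenthesized line break can be carried by `whitespace_before_params`:

```
import libcst as cst

# Hypothetical sample source; with the widened field type the newline after "("
# is expected to be represented as ParenthesizedWhitespace and re-emitted as-is.
code = "def foo(\n    param1):\n    pass\n"
module = cst.parse_module(code)
print(module.code == code)  # True: LibCST reproduces the original whitespace
```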
diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 472d30aa..9fef83a6 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -665,6 +665,22 @@ class FunctionDefCreationTest(CSTNodeTest): "code": "* third", "expected_position": CodeRange((1, 0), (1, 7)), }, + { + "node": cst.FunctionDef( + name=cst.Name(value="foo",), + params=cst.Parameters( + params=[cst.Param(name=cst.Name(value="param1",),),], + ), + body=cst.IndentedBlock( + body=[cst.SimpleStatementLine(body=[cst.Pass(),],),], + ), + whitespace_before_params=cst.ParenthesizedWhitespace( + last_line=cst.SimpleWhitespace(value=" ",), + ), + ), + "code": "def foo(\n param1):\n pass\n", + "expected_position": CodeRange((1, 0), (3, 8)), + }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 25a4d439..8bd9f6b6 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -5799,10 +5799,10 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() whitespace_before_params: Union[ - SimpleWhitespaceMatchType, + BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], ] = DoNotCare() whitespace_before_colon: Union[ SimpleWhitespaceMatchType, From cf51c73c09d34fb140f3e89b4443e6afa50edb50 Mon Sep 17 00:00:00 2001 From: Lucia Li Date: Wed, 22 Jul 2020 16:33:38 -0700 Subject: [PATCH 038/632] [release] Bump LibCST to new release 0.3.8 --- CHANGELOG.md | 17 +++++++++++++++++ libcst/_version.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a37530d2..45bbc2a2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,20 @@ +# 0.3.8 - 2020-07-22 + +## Added + - Handle type subscripts when applying annotations. [#335](https://github.com/Instagram/LibCST/pull/335) + - Added FullRepoManager `cache` property [#330](https://github.com/Instagram/LibCST/pull/330) + - Added optional args for tox commands [#327](https://github.com/Instagram/LibCST/pull/327) + +## Updated + - Only remove trailing comma if the last alias is removed [#334](https://github.com/Instagram/LibCST/pull/334) + +## Fixed + - Fixed inserting imports after module docstring [#343](https://github.com/Instagram/LibCST/pull/343) + - Fixed ParenthesizedWhitespace before params in FuncDef [#342](https://github.com/Instagram/LibCST/pull/342) + - Fixed validation for ImportAlias and Try statements [#340](https://github.com/Instagram/LibCST/pull/340) + - Fixed NotEqual position issue [#325](https://github.com/Instagram/LibCST/pull/325) + - Fixed minor typo in scope_provider.py [#324](https://github.com/Instagram/LibCST/pull/324) + # 0.3.7 - 2020-06-24 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index f04abd90..87ce5f25 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. 
-LIBCST_VERSION: str = "0.3.7" +LIBCST_VERSION: str = "0.3.8" From 0870e41a79f1b5f77af265436a03ecf8e2196680 Mon Sep 17 00:00:00 2001 From: jimmylai Date: Fri, 24 Jul 2020 11:25:09 -0700 Subject: [PATCH 039/632] [scope] add scope for ClassDef.name node (#349) Co-authored-by: Jimmy Lai --- libcst/metadata/scope_provider.py | 1 + libcst/metadata/tests/test_scope_provider.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index ccf2d00d..991b5888 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -733,6 +733,7 @@ class ScopeVisitor(cst.CSTVisitor): def visit_ClassDef(self, node: cst.ClassDef) -> Optional[bool]: self.scope.record_assignment(node.name.value, node) + self.provider.set_metadata(node.name, self.scope) for decorator in node.decorators: decorator.visit(self) for base in node.bases: diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index f84a415c..93b602f4 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -235,8 +235,9 @@ class ScopeProviderTest(UnitTest): """ ) scope_of_module = scopes[m] - func_body = ensure_type(m.body[1], cst.FunctionDef).body - func_body_statement = func_body.body[0] + func_def = ensure_type(m.body[1], cst.FunctionDef) + self.assertEqual(scopes[func_def], scopes[func_def.name]) + func_body_statement = func_def.body.body[0] scope_of_func = scopes[func_body_statement] self.assertIsInstance(scope_of_func, FunctionScope) self.assertTrue("global_var" in scope_of_module) @@ -266,6 +267,7 @@ class ScopeProviderTest(UnitTest): cls_assignment = cast(Assignment, cls_assignments[0]) cls_def = ensure_type(m.body[1], cst.ClassDef) self.assertEqual(cls_assignment.node, cls_def) + self.assertEqual(scopes[cls_def], scopes[cls_def.name]) cls_body = cls_def.body cls_body_statement = cls_body.body[0] scope_of_class = scopes[cls_body_statement] From eb761c2c67039196f08afc8b4b4e51ee8fe37a4e Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Tue, 28 Jul 2020 08:58:59 -0700 Subject: [PATCH 040/632] [scope] add scope for ImportAlias (#350) --- libcst/metadata/scope_provider.py | 1 + libcst/metadata/tests/test_scope_provider.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 991b5888..bb0adc56 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -659,6 +659,7 @@ class ScopeVisitor(cst.CSTVisitor): # make sure node.names is Sequence[ImportAlias] for name in names: + self.provider.set_metadata(name, self.scope) asname = name.asname if asname is not None: name_values = _gen_dotted_names(cst.ensure_type(asname.name, cst.Name)) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 93b602f4..31c1ac1b 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -140,6 +140,16 @@ class ScopeProviderTest(UnitTest): """ ) scope_of_module = scopes[m] + + import_0 = cst.ensure_type( + cst.ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.Import + ) + self.assertEqual(scopes[import_0], scope_of_module) + import_aliases = import_0.names + if not isinstance(import_aliases, cst.ImportStar): + for alias in import_aliases: + self.assertEqual(scopes[alias], scope_of_module) + for idx, in_scopes in enumerate( 
[["foo", "foo.bar"], ["fizzbuzz"], ["a", "a.b", "a.b.c"], ["g"],] ): @@ -202,6 +212,16 @@ class ScopeProviderTest(UnitTest): """ ) scope_of_module = scopes[m] + + import_from = cst.ensure_type( + cst.ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.ImportFrom + ) + self.assertEqual(scopes[import_from], scope_of_module) + import_aliases = import_from.names + if not isinstance(import_aliases, cst.ImportStar): + for alias in import_aliases: + self.assertEqual(scopes[alias], scope_of_module) + for idx, in_scope in [(0, "a"), (0, "b_renamed"), (1, "c"), (2, "d")]: self.assertEqual( len(scope_of_module[in_scope]), 1, f"{in_scope} should be in scope." From f8fdc00aebb0108175cecd3f15a146d5da3ec4f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?H=E1=BB=93=20=C4=90=E1=BB=A9c=20Hi=E1=BA=BFu?= Date: Wed, 29 Jul 2020 02:05:55 +0700 Subject: [PATCH 041/632] Update README.rst (#351) * Update README.rst Add href link of MIT licensed * Update README.rst * Update README.rst Co-authored-by: Jimmy Lai --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 0fa3edf4..3096b354 100644 --- a/README.rst +++ b/README.rst @@ -93,7 +93,7 @@ To examine the tree that is parsed from a particular file, do the following:: python -m libcst.tool print -Alternatively you can import LibCST into a Python REPL and use the included parser +Alternatively, you can import LibCST into a Python REPL and use the included parser and pretty printing functions: >>> import libcst as cst @@ -207,7 +207,7 @@ Future License ======= -LibCST is MIT licensed, as found in the LICENSE file. +LibCST is `MIT licensed `_, as found in the LICENSE file. .. fb-docs-start From 6a5e7394c023df75bb4d98fc10fb5362b4f3a0bb Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 31 Jul 2020 15:54:39 +0100 Subject: [PATCH 042/632] [RemoveUnusedImports] Support string type annotations (#353) * [RemoveUnusedImports] Support string type annotations This PR adds support for detecting imports being used by string type annotations, as well as imports suppressed by comments. It breaks up the existing visitor into multiple smaller, single-purpose visitors, and composes them together. 
--- docs/source/codemods.rst | 16 ++- .../codemod/commands/remove_unused_imports.py | 71 ++++++++-- .../tests/test_remove_unused_imports.py | 30 ++++ .../codemod_formatter_error_input.py.txt | 2 +- libcst/codemod/visitors/__init__.py | 12 +- libcst/codemod/visitors/_gather_comments.py | 51 +++++++ .../_gather_string_annotation_names.py | 81 +++++++++++ .../visitors/_gather_unused_imports.py | 124 +++++++++++++++++ libcst/codemod/visitors/_remove_imports.py | 58 ++------ .../visitors/tests/test_gather_comments.py | 46 +++++++ .../test_gather_string_annotation_names.py | 82 +++++++++++ .../tests/test_gather_unused_imports.py | 128 ++++++++++++++++++ 12 files changed, 640 insertions(+), 61 deletions(-) create mode 100644 libcst/codemod/visitors/_gather_comments.py create mode 100644 libcst/codemod/visitors/_gather_string_annotation_names.py create mode 100644 libcst/codemod/visitors/_gather_unused_imports.py create mode 100644 libcst/codemod/visitors/tests/test_gather_comments.py create mode 100644 libcst/codemod/visitors/tests/test_gather_string_annotation_names.py create mode 100644 libcst/codemod/visitors/tests/test_gather_unused_imports.py diff --git a/docs/source/codemods.rst b/docs/source/codemods.rst index 3711a8f3..f59f0cbf 100644 --- a/docs/source/codemods.rst +++ b/docs/source/codemods.rst @@ -146,12 +146,18 @@ LibCST additionally includes a library of transforms to reduce the need for boil inside codemods. As of now, the list includes the following helpers. .. autoclass:: libcst.codemod.visitors.GatherImportsVisitor - :exclude-members: visit_Import, visit_ImportFrom + :no-undoc-members: .. autoclass:: libcst.codemod.visitors.GatherExportsVisitor - :exclude-members: visit_AnnAssign, leave_AnnAssign, visit_Assign, leave_Assign, visit_List, leave_List, visit_Tuple, leave_Tuple, visit_Set, leave_Set, visit_Element + :no-undoc-members: .. autoclass:: libcst.codemod.visitors.AddImportsVisitor - :exclude-members: CONTEXT_KEY, visit_Module, leave_ImportFrom, leave_Module + :no-undoc-members: .. autoclass:: libcst.codemod.visitors.RemoveImportsVisitor - :exclude-members: CONTEXT_KEY, METADATA_DEPENDENCIES, visit_Module, leave_ImportFrom, leave_Import + :no-undoc-members: .. autoclass:: libcst.codemod.visitors.ApplyTypeAnnotationsVisitor - :exclude-members: CONTEXT_KEY, transform_module_impl, visit_ClassDef, visit_Comment, visit_FunctionDef, leave_Assign, leave_ClassDef, leave_FunctionDef, leave_ImportFrom, leave_Module + :no-undoc-members: +.. autoclass:: libcst.codemod.visitors.GatherUnusedImportsVisitor + :no-undoc-members: +.. autoclass:: libcst.codemod.visitors.GatherCommentsVisitor + :no-undoc-members: +.. autoclass:: libcst.codemod.visitors.GatherNamesFromStringAnnotationsVisitor + :no-undoc-members: \ No newline at end of file diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index 1c38153f..1f23a264 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -4,9 +4,18 @@ # LICENSE file in the root directory of this source tree. 
# -from libcst import Import, ImportFrom -from libcst.codemod import VisitorBasedCodemodCommand -from libcst.codemod.visitors import RemoveImportsVisitor +from typing import Set, Tuple, Union + +from libcst import Import, ImportFrom, ImportStar, Module +from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand +from libcst.codemod.visitors import GatherCommentsVisitor, RemoveImportsVisitor +from libcst.helpers import get_absolute_module_for_import +from libcst.metadata import PositionProvider, ProviderT + + +DEFAULT_SUPPRESS_COMMENT_REGEX = ( + r".*\W(noqa|lint-ignore: ?unused-import|lint-ignore: ?F401)(\W.*)?$" +) class RemoveUnusedImportsCommand(VisitorBasedCodemodCommand): @@ -17,21 +26,65 @@ class RemoveUnusedImportsCommand(VisitorBasedCodemodCommand): to track cross-references between them. If a symbol is imported in a file but otherwise unused in it, that import will be removed even if it is being referenced from another file. - - It currently doesn't keep track of string type annotations, so an import - for `MyType` used only in `def f() -> "MyType"` will be removed. """ DESCRIPTION: str = ( "Remove all imports that are not used in a file. " "Note: only considers the file in isolation. " - "Note: does not account for usages in string type annotations. " ) + METADATA_DEPENDENCIES: Tuple[ProviderT] = (PositionProvider,) + + def __init__(self, context: CodemodContext) -> None: + super().__init__(context) + self._ignored_lines: Set[int] = set() + + def visit_Module(self, node: Module) -> bool: + comment_visitor = GatherCommentsVisitor( + self.context, DEFAULT_SUPPRESS_COMMENT_REGEX + ) + node.visit(comment_visitor) + self._ignored_lines = set(comment_visitor.comments.keys()) + return True + def visit_Import(self, node: Import) -> bool: - RemoveImportsVisitor.remove_unused_import_by_node(self.context, node) + self._handle_import(node) return False def visit_ImportFrom(self, node: ImportFrom) -> bool: - RemoveImportsVisitor.remove_unused_import_by_node(self.context, node) + self._handle_import(node) return False + + def _handle_import(self, node: Union[Import, ImportFrom]) -> None: + node_start = self.get_metadata(PositionProvider, node).start.line + if node_start in self._ignored_lines: + return + + names = node.names + if isinstance(names, ImportStar): + return + + for alias in names: + position = self.get_metadata(PositionProvider, alias) + lines = set(range(position.start.line, position.end.line + 1)) + if lines.isdisjoint(self._ignored_lines): + if isinstance(node, Import): + RemoveImportsVisitor.remove_unused_import( + self.context, + module=alias.evaluated_name, + asname=alias.evaluated_alias, + ) + else: + module_name = get_absolute_module_for_import( + self.context.full_module_name, node + ) + if module_name is None: + raise ValueError( + f"Couldn't get absolute module name for {alias.evaluated_name}" + ) + RemoveImportsVisitor.remove_unused_import( + self.context, + module=module_name, + obj=alias.evaluated_name, + asname=alias.evaluated_alias, + ) diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index 2c817560..c685f5c8 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -90,3 +90,33 @@ class RemoveUnusedImportsCommandTest(CodemodTest): a(b, 'look at these ugly quotes') """ self.assertCodemod(before, before) + + def test_suppression_on_first_line_of_multiline_import_refers_to_whole_block( + 
self, + ) -> None: + before = """ + from a import ( # lint-ignore: unused-import + b, + c, + ) + """ + self.assertCodemod(before, before) + + def test_suppression(self) -> None: + before = """ + # noqa + import a, b + import c + from x import ( + y, + z, # noqa + ) + """ + after = """ + # noqa + import a, b + from x import ( + z, # noqa + ) + """ + self.assertCodemod(before, after) diff --git a/libcst/codemod/tests/codemod_formatter_error_input.py.txt b/libcst/codemod/tests/codemod_formatter_error_input.py.txt index a0ec9d8d..c83c175a 100644 --- a/libcst/codemod/tests/codemod_formatter_error_input.py.txt +++ b/libcst/codemod/tests/codemod_formatter_error_input.py.txt @@ -5,7 +5,7 @@ # # pyre-strict -import subprocess # noqa: F401 +import subprocess from contextlib import AsyncExitStack diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index 97dbeb99..bcc570be 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -5,15 +5,23 @@ # from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor +from libcst.codemod.visitors._gather_comments import GatherCommentsVisitor from libcst.codemod.visitors._gather_exports import GatherExportsVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor +from libcst.codemod.visitors._gather_string_annotation_names import ( + GatherNamesFromStringAnnotationsVisitor, +) +from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor from libcst.codemod.visitors._remove_imports import RemoveImportsVisitor __all__ = [ "AddImportsVisitor", - "GatherImportsVisitor", - "GatherExportsVisitor", "ApplyTypeAnnotationsVisitor", + "GatherCommentsVisitor", + "GatherExportsVisitor", + "GatherImportsVisitor", + "GatherNamesFromStringAnnotationsVisitor", + "GatherUnusedImportsVisitor", "RemoveImportsVisitor", ] diff --git a/libcst/codemod/visitors/_gather_comments.py b/libcst/codemod/visitors/_gather_comments.py new file mode 100644 index 00000000..d2510027 --- /dev/null +++ b/libcst/codemod/visitors/_gather_comments.py @@ -0,0 +1,51 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import re +from typing import Dict, Pattern, Union + +import libcst as cst +import libcst.matchers as m +from libcst.codemod._context import CodemodContext +from libcst.codemod._visitor import ContextAwareVisitor +from libcst.metadata import PositionProvider + + +class GatherCommentsVisitor(ContextAwareVisitor): + """ + Collects all comments matching a certain regex and their line numbers. + This visitor is useful for capturing special-purpose comments, for example + ``noqa`` style lint suppression annotations. + + Standalone comments are assumed to affect the line following them, and + inline ones are recorded with the line they are on. + + After visiting a CST, matching comments are collected in the ``comments`` + attribute. + """ + + METADATA_DEPENDENCIES = (PositionProvider,) + + def __init__(self, context: CodemodContext, comment_regex: str) -> None: + super().__init__(context) + + #: Dictionary of comments found in the CST. Keys are line numbers, + #: values are comment nodes. 
+ self.comments: Dict[int, cst.Comment] = {} + + self._comment_matcher: Pattern[str] = re.compile(comment_regex) + + @m.visit(m.EmptyLine(comment=m.DoesNotMatch(None))) + @m.visit(m.TrailingWhitespace(comment=m.DoesNotMatch(None))) + def visit_comment(self, node: Union[cst.EmptyLine, cst.TrailingWhitespace]) -> None: + comment = node.comment + assert comment is not None # hello, type checker + if not self._comment_matcher.match(comment.value): + return + line = self.get_metadata(PositionProvider, comment).start.line + if isinstance(node, cst.EmptyLine): + # Standalone comments refer to the next line + line += 1 + self.comments[line] = comment diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py new file mode 100644 index 00000000..80b73c76 --- /dev/null +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -0,0 +1,81 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Set, Union, cast + +import libcst as cst +import libcst.matchers as m +from libcst.codemod._context import CodemodContext +from libcst.codemod._visitor import ContextAwareVisitor +from libcst.metadata import MetadataWrapper, QualifiedNameProvider + + +FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS = {"typing.TypeVar"} +ANNOTATION_MATCHER: m.BaseMatcherNode = m.Annotation() | m.Call( + metadata=m.MatchMetadataIfTrue( + QualifiedNameProvider, + lambda qualnames: any( + qn.name in FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS for qn in qualnames + ), + ) +) + + +class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): + """ + Collects all names from string literals used for typing purposes. + This includes annotations like ``foo: "SomeType"``, and parameters to + special functions related to typing (currently only `typing.TypeVar`). + + After visiting, a set of all found names will be available on the ``names`` + attribute of this visitor. + """ + + METADATA_DEPENDENCIES = (QualifiedNameProvider,) + + def __init__(self, context: CodemodContext) -> None: + super().__init__(context) + + #: The set of names collected from string literals. 
+ self.names: Set[str] = set() + + @m.call_if_inside(ANNOTATION_MATCHER) + @m.visit(m.ConcatenatedString()) + def handle_any_string( + self, node: Union[cst.SimpleString, cst.ConcatenatedString] + ) -> None: + value = node.evaluated_value + if value is None: + return + mod = cst.parse_module(value) + extracted_nodes = m.extractall( + mod, + m.Name( + value=m.SaveMatchedNode(m.DoNotCare(), "name"), + metadata=m.MatchMetadataIfTrue( + cst.metadata.ParentNodeProvider, + lambda parent: not isinstance(parent, cst.Attribute), + ), + ) + | m.SaveMatchedNode(m.Attribute(), "attribute"), + metadata_resolver=MetadataWrapper(mod, unsafe_skip_copy=True), + ) + names = { + cast(str, values["name"]) for values in extracted_nodes if "name" in values + } | { + name + for values in extracted_nodes + if "attribute" in values + for name, _ in cst.metadata.scope_provider._gen_dotted_names( + cast(cst.Attribute, values["attribute"]) + ) + } + self.names.update(names) + + @m.call_if_inside(ANNOTATION_MATCHER) + @m.call_if_not_inside(m.ConcatenatedString()) + @m.visit(m.SimpleString()) + def handle_simple_string(self, node: cst.SimpleString) -> None: + self.handle_any_string(node) diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py new file mode 100644 index 00000000..359eee69 --- /dev/null +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -0,0 +1,124 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# + +from typing import Iterable, Set, Tuple, Union + +import libcst as cst +import libcst.matchers as m +from libcst.codemod._context import CodemodContext +from libcst.codemod._visitor import ContextAwareVisitor +from libcst.codemod.visitors._gather_exports import GatherExportsVisitor +from libcst.codemod.visitors._gather_string_annotation_names import ( + GatherNamesFromStringAnnotationsVisitor, +) +from libcst.metadata import ProviderT, ScopeProvider +from libcst.metadata.scope_provider import _gen_dotted_names + + +class GatherUnusedImportsVisitor(ContextAwareVisitor): + """ + Collects all imports from a module not directly used in the same module. + Intended to be instantiated and passed to a :class:`libcst.Module` + :meth:`~libcst.CSTNode.visit` method to process the full module. + + Note that imports that are only used indirectly (from other modules) are + still collected. + + After visiting a module the attribute ``unused_imports`` will contain a + set of unused :class:`~libcst.ImportAlias` objects, paired with their + parent import node. + """ + + METADATA_DEPENDENCIES: Tuple[ProviderT] = ( + *GatherNamesFromStringAnnotationsVisitor.METADATA_DEPENDENCIES, + ScopeProvider, + ) + + def __init__(self, context: CodemodContext) -> None: + super().__init__(context) + + self._string_annotation_names: Set[str] = set() + self._exported_names: Set[str] = set() + #: Contains a set of (alias, parent_import) pairs that are not used + #: in the module after visiting. 
+ self.unused_imports: Set[ + Tuple[cst.ImportAlias, Union[cst.Import, cst.ImportFrom]] + ] = set() + + def visit_Module(self, node: cst.Module) -> bool: + export_collector = GatherExportsVisitor(self.context) + node.visit(export_collector) + self._exported_names = export_collector.explicit_exported_objects + annotation_visitor = GatherNamesFromStringAnnotationsVisitor(self.context) + node.visit(annotation_visitor) + self._string_annotation_names = annotation_visitor.names + return True + + @m.visit( + m.Import() + | m.ImportFrom( + module=m.DoesNotMatch(m.Name("__future__")), + names=m.DoesNotMatch(m.ImportStar()), + ) + ) + def handle_import(self, node: Union[cst.Import, cst.ImportFrom]) -> None: + names = node.names + assert not isinstance(names, cst.ImportStar) # hello, type checker + + for alias in names: + self.unused_imports.add((alias, node)) + + def leave_Module(self, original_node: cst.Module) -> None: + self.unused_imports = self.filter_unused_imports(self.unused_imports) + + def filter_unused_imports( + self, + candidates: Iterable[Tuple[cst.ImportAlias, Union[cst.Import, cst.ImportFrom]]], + ) -> Set[Tuple[cst.ImportAlias, Union[cst.Import, cst.ImportFrom]]]: + """ + Return the imports in ``candidates`` which are not used. + + This function implements the main logic of this visitor, and is called after traversal. It calls :meth:`~is_in_use` on each import. + + Override this in a subclass for additional filtering. + """ + unused_imports = set() + for (alias, parent) in candidates: + scope = self.get_metadata(ScopeProvider, parent) + if scope is None: + continue + if not self.is_in_use(scope, alias): + unused_imports.add((alias, parent)) + return unused_imports + + def is_in_use(self, scope: cst.metadata.Scope, alias: cst.ImportAlias) -> bool: + """ + Check if ``alias`` is in use in the given ``scope``. + + An alias is in use if it's directly referenced, exported, or appears in + a string type annotation. Override this in a subclass for additional + filtering. 
+ """ + asname = alias.asname + names = _gen_dotted_names( + cst.ensure_type(asname.name, cst.Name) if asname is not None else alias.name + ) + + for name_or_alias, _ in names: + if ( + name_or_alias in self._exported_names + or name_or_alias in self._string_annotation_names + ): + return True + + for assignment in scope[name_or_alias]: + if ( + isinstance(assignment, cst.metadata.Assignment) + and isinstance(assignment.node, (cst.ImportFrom, cst.Import)) + and len(assignment.references) > 0 + ): + return True + return False diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 318372d8..67e42fd7 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -8,10 +8,9 @@ from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor -from libcst.codemod.visitors._gather_exports import GatherExportsVisitor +from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor from libcst.helpers import get_absolute_module_for_import, get_full_name_for_node -from libcst.metadata import Assignment, Scope, ScopeProvider -from libcst.metadata.scope_provider import _gen_dotted_names +from libcst.metadata import Assignment, ProviderT, ScopeProvider class RemovedNodeVisitor(ContextAwareVisitor): @@ -119,7 +118,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): object appears in an ``__any__`` list. This is one of the transforms that is available automatically to you when running - a codemod. To use it in this manner, importi + a codemod. To use it in this manner, import :class:`~libcst.codemod.visitors.RemoveImportsVisitor` and then call the static :meth:`~libcst.codemod.visitors.RemoveImportsVisitor.remove_unused_import` method, giving it the current context (found as ``self.context`` for all subclasses of @@ -173,7 +172,9 @@ class RemoveImportsVisitor(ContextAwareTransformer): """ CONTEXT_KEY = "RemoveImportsVisitor" - METADATA_DEPENDENCIES = (ScopeProvider,) + METADATA_DEPENDENCIES: Tuple[ProviderT] = ( + *GatherUnusedImportsVisitor.METADATA_DEPENDENCIES, + ) @staticmethod def _get_imports_from_context( @@ -279,48 +280,24 @@ class RemoveImportsVisitor(ContextAwareTransformer): module: alias for module, obj, alias in all_unused_imports if obj is None } self.unused_obj_imports: Dict[str, Set[Tuple[str, Optional[str]]]] = {} - self.exported_objects: Set[str] = set() for module, obj, alias in all_unused_imports: if obj is None: continue if module not in self.unused_obj_imports: self.unused_obj_imports[module] = set() self.unused_obj_imports[module].add((obj, alias)) + self._unused_imports: Dict[ + cst.ImportAlias, Union[cst.Import, cst.ImportFrom] + ] = {} def visit_Module(self, node: cst.Module) -> None: - object_visitor = GatherExportsVisitor(self.context) - node.visit(object_visitor) - self.exported_objects = object_visitor.explicit_exported_objects - - def _is_in_use(self, scope: Scope, alias: cst.ImportAlias) -> bool: - # Grab the string name of this alias from the point of view of this module. 
- asname = alias.asname - names = _gen_dotted_names( - cst.ensure_type(asname.name, cst.Name) if asname is not None else alias.name - ) - - for name_or_alias, _ in names: - if name_or_alias in self.exported_objects: - return True - - for assignment in scope[name_or_alias]: - if ( - isinstance(assignment, Assignment) - and isinstance(assignment.node, (cst.ImportFrom, cst.Import)) - and len(assignment.references) > 0 - ): - return True - return False + visitor = GatherUnusedImportsVisitor(self.context) + node.visit(visitor) + self._unused_imports = {k: v for (k, v) in visitor.unused_imports} def leave_Import( self, original_node: cst.Import, updated_node: cst.Import ) -> Union[cst.Import, cst.RemovalSentinel]: - # Grab the scope for this import. If we don't have scope, we can't determine - # whether this import is unused so it is unsafe to remove. - scope = self.get_metadata(ScopeProvider, original_node, None) - if scope is None: - return updated_node - names_to_keep = [] for import_alias in original_node.names: if import_alias.evaluated_name not in self.unused_module_imports: @@ -339,7 +316,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): # Now that we know we want to remove this module, figure out if # there are any live references to it. - if self._is_in_use(scope, import_alias): + if import_alias not in self._unused_imports: names_to_keep.append(import_alias) continue @@ -363,13 +340,6 @@ class RemoveImportsVisitor(ContextAwareTransformer): def leave_ImportFrom( self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom ) -> Union[cst.ImportFrom, cst.RemovalSentinel]: - # Grab the scope for this import. If we don't have scope, we can't determine - # whether this import is unused so it is unsafe to remove. - scope = self.get_metadata(ScopeProvider, original_node, None) - if scope is None: - return updated_node - - # Make sure we have anything to do with this node. names = original_node.names if isinstance(names, cst.ImportStar): # This is a star import, so we won't remove it. @@ -400,7 +370,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): # Now that we know we want to remove this object, figure out if # there are any live references to it. - if self._is_in_use(scope, import_alias): + if import_alias not in self._unused_imports: names_to_keep.append(import_alias) continue diff --git a/libcst/codemod/visitors/tests/test_gather_comments.py b/libcst/codemod/visitors/tests/test_gather_comments.py new file mode 100644 index 00000000..da93823e --- /dev/null +++ b/libcst/codemod/visitors/tests/test_gather_comments.py @@ -0,0 +1,46 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# +from libcst import Comment, MetadataWrapper, parse_module +from libcst.codemod import CodemodContext, CodemodTest +from libcst.codemod.visitors import GatherCommentsVisitor +from libcst.testing.utils import UnitTest + + +class TestGatherCommentsVisitor(UnitTest): + def gather_comments(self, code: str) -> GatherCommentsVisitor: + mod = MetadataWrapper(parse_module(CodemodTest.make_fixture_data(code))) + mod.resolve_many(GatherCommentsVisitor.METADATA_DEPENDENCIES) + instance = GatherCommentsVisitor( + CodemodContext(wrapper=mod), r".*\Wnoqa(\W.*)?$" + ) + mod.visit(instance) + return instance + + def test_no_comments(self) -> None: + visitor = self.gather_comments( + """ + def foo() -> None: + pass + """ + ) + self.assertEqual(visitor.comments, {}) + + def test_noqa_comments(self) -> None: + visitor = self.gather_comments( + """ + import a.b.c # noqa + import d # somethingelse + # noqa + def foo() -> None: + pass + + """ + ) + self.assertEqual(visitor.comments.keys(), {1, 4}) + self.assertTrue(isinstance(visitor.comments[1], Comment)) + self.assertEqual(visitor.comments[1].value, "# noqa") + self.assertTrue(isinstance(visitor.comments[4], Comment)) + self.assertEqual(visitor.comments[4].value, "# noqa") diff --git a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py new file mode 100644 index 00000000..25f2d070 --- /dev/null +++ b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py @@ -0,0 +1,82 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from libcst import MetadataWrapper, parse_module +from libcst.codemod import CodemodContext, CodemodTest +from libcst.codemod.visitors import GatherNamesFromStringAnnotationsVisitor +from libcst.testing.utils import UnitTest + + +class TestGatherNamesFromStringAnnotationsVisitor(UnitTest): + def gather_names(self, code: str) -> GatherNamesFromStringAnnotationsVisitor: + mod = MetadataWrapper(parse_module(CodemodTest.make_fixture_data(code))) + mod.resolve_many(GatherNamesFromStringAnnotationsVisitor.METADATA_DEPENDENCIES) + instance = GatherNamesFromStringAnnotationsVisitor(CodemodContext(wrapper=mod)) + mod.visit(instance) + return instance + + def test_no_annotations(self) -> None: + visitor = self.gather_names( + """ + def foo() -> None: + pass + """ + ) + self.assertEqual(visitor.names, set()) + + def test_simple_string_annotations(self) -> None: + visitor = self.gather_names( + """ + def foo() -> "None": + pass + """ + ) + self.assertEqual(visitor.names, {"None"}) + + def test_concatenated_string_annotations(self) -> None: + visitor = self.gather_names( + """ + def foo() -> "No" "ne": + pass + """ + ) + self.assertEqual(visitor.names, {"None"}) + + def test_typevars(self) -> None: + visitor = self.gather_names( + """ + from typing import TypeVar as SneakyBastard + V = SneakyBastard("V", bound="int") + """ + ) + self.assertEqual(visitor.names, {"V", "int"}) + + def test_complex(self) -> None: + visitor = self.gather_names( + """ + from typing import TypeVar, TYPE_CHECKING + if TYPE_CHECKING: + from a import Container, Item + def foo(a: "A") -> "Item": + pass + A = TypeVar("A", bound="Container[Item]") + class X: + var: "ThisIsExpensiveToImport" # noqa + """ + ) + self.assertEqual( + visitor.names, {"A", "Item", "Container", "ThisIsExpensiveToImport"} + ) + + def test_dotted_names(self) -> None: + 
visitor = self.gather_names( + """ + a: "api.http_exceptions.HttpException" + """ + ) + self.assertEqual( + visitor.names, + {"api", "api.http_exceptions", "api.http_exceptions.HttpException"}, + ) diff --git a/libcst/codemod/visitors/tests/test_gather_unused_imports.py b/libcst/codemod/visitors/tests/test_gather_unused_imports.py new file mode 100644 index 00000000..bd63f26d --- /dev/null +++ b/libcst/codemod/visitors/tests/test_gather_unused_imports.py @@ -0,0 +1,128 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from typing import Set + +from libcst import MetadataWrapper, parse_module +from libcst.codemod import CodemodContext, CodemodTest +from libcst.codemod.visitors import GatherUnusedImportsVisitor +from libcst.testing.utils import UnitTest + + +class TestGatherUnusedImportsVisitor(UnitTest): + def gather_imports(self, code: str) -> Set[str]: + mod = MetadataWrapper(parse_module(CodemodTest.make_fixture_data(code))) + mod.resolve_many(GatherUnusedImportsVisitor.METADATA_DEPENDENCIES) + instance = GatherUnusedImportsVisitor(CodemodContext(wrapper=mod)) + mod.visit(instance) + return set( + alias.evaluated_alias or alias.evaluated_name + for alias, _ in instance.unused_imports + ) + + def test_no_imports(self) -> None: + imports = self.gather_imports( + """ + foo = 1 + """ + ) + self.assertEqual(imports, set()) + + def test_dotted_imports(self) -> None: + imports = self.gather_imports( + """ + import a.b.c, d + import x.y + a.b(d) + """ + ) + self.assertEqual(imports, {"x.y"}) + + def test_alias(self) -> None: + imports = self.gather_imports( + """ + from bar import baz as baz_alias + import bar as bar_alias + bar_alias() + """ + ) + self.assertEqual(imports, {"baz_alias"}) + + def test_import_complex(self) -> None: + imports = self.gather_imports( + """ + import bar + import baz, qux + import a.b + import c.d + import x.y.z + import e.f as g + import h.i as j + + def foo() -> None: + c.d(qux) + x.u + j() + """ + ) + self.assertEqual(imports, {"bar", "baz", "a.b", "g"}) + + def test_import_from_complex(self) -> None: + imports = self.gather_imports( + """ + from bar import qux, quux + from a.b import c + from d.e import f + from h.i import j as k + from l.m import n as o + from x import * + + def foo() -> None: + f(qux) + k() + """ + ) + self.assertEqual(imports, {"quux", "c", "o"}) + + def test_exports(self) -> None: + imports = self.gather_imports( + """ + import a + __all__ = ["a"] + """ + ) + self.assertEqual(imports, set()) + + def test_string_annotation(self) -> None: + imports = self.gather_imports( + """ + from a import b + from c import d + import m, n.blah + foo: "b[int]" + bar: List["d"] + quux: List["m.blah"] + alma: List["n.blah"] + """ + ) + self.assertEqual(imports, set()) + + def test_typevars(self) -> None: + imports = self.gather_imports( + """ + from typing import TypeVar as Sneaky + from a import b + t = Sneaky("t", bound="b") + """ + ) + self.assertEqual(imports, set()) + + def test_future(self) -> None: + imports = self.gather_imports( + """ + from __future__ import cool_feature + """ + ) + self.assertEqual(imports, set()) From afdf74694bbb734ce9cf1b862dfd13079fce09b5 Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Tue, 4 Aug 2020 14:44:26 -0700 Subject: [PATCH 043/632] Add tox to requirements-dev.txt and fixed a typo in README (#357) --- README.rst | 2 +- requirements-dev.txt | 1 + 2 files changed, 2 insertions(+), 1 
deletion(-) diff --git a/README.rst b/README.rst index 3096b354..22688034 100644 --- a/README.rst +++ b/README.rst @@ -169,7 +169,7 @@ this: .. code-block:: shell - python -m unitttest libcst.tests.test_batched_visitor + python -m unittest libcst.tests.test_batched_visitor See the `unittest documentation `_ for more examples of how to run tests. diff --git a/requirements-dev.txt b/requirements-dev.txt index 6edf95da..44925b02 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -11,3 +11,4 @@ nbsphinx==0.4.2 pyre-check==0.0.41 sphinx-rtd-theme==0.4.3 prompt-toolkit==2.0.9 +tox==3.18.1 From ffc4c93c826a76fffcffb36492ef35079ee05c2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Tue, 4 Aug 2020 16:42:52 -0700 Subject: [PATCH 044/632] Fix syntax error in .pyre_configuration.example (#359) Missing a comma --- .pyre_configuration.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pyre_configuration.example b/.pyre_configuration.example index 0550aae6..784a7958 100644 --- a/.pyre_configuration.example +++ b/.pyre_configuration.example @@ -7,6 +7,6 @@ ], "exclude": [ ".*/\\.tox/.*" - ] + ], "strict": true } From 2e8a0c6df7ab8142dd9ae828f2d6642ac2d81db8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Tue, 4 Aug 2020 17:33:22 -0700 Subject: [PATCH 045/632] Fix dotted names (#358) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary When importing things like `import os.path` and using it as `os.path.join("a", "b").lower()`, references ended up being in the `["os"]` assignment instead of `["os.path"]`. This fixes the problem by updating the dotted names generator in the scope provider· ## Test Plan ``` tox -e py37 ``` Co-authored-by: Germán Méndez Bravo --- libcst/metadata/scope_provider.py | 33 ++++++---- libcst/metadata/tests/test_scope_provider.py | 65 ++++++++++++++++++++ 2 files changed, 86 insertions(+), 12 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index bb0adc56..fdda406a 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -610,18 +610,27 @@ def _gen_dotted_names( yield node.value, node else: value = node.value - if not isinstance(value, (cst.Attribute, cst.Name)): - # this is not an import - return - name_values = _gen_dotted_names(value) - try: - next_name, next_node = next(name_values) - except StopIteration: - return - else: - yield f"{next_name}.{node.attr.value}", node - yield next_name, next_node - yield from name_values + if isinstance(value, cst.Call): + value = value.func + if isinstance(value, (cst.Attribute, cst.Name)): + name_values = _gen_dotted_names(value) + try: + next_name, next_node = next(name_values) + except StopIteration: + return + else: + yield next_name, next_node + yield from name_values + elif isinstance(value, (cst.Attribute, cst.Name)): + name_values = _gen_dotted_names(value) + try: + next_name, next_node = next(name_values) + except StopIteration: + return + else: + yield f"{next_name}.{node.attr.value}", node + yield next_name, next_node + yield from name_values class ScopeVisitor(cst.CSTVisitor): diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 31c1ac1b..a4f24591 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -21,6 +21,7 @@ from libcst.metadata.scope_provider import ( QualifiedNameSource, 
Scope, ScopeProvider, + _gen_dotted_names, ) from libcst.testing.utils import UnitTest, data_provider @@ -203,6 +204,39 @@ class ScopeProviderTest(UnitTest): self.assertEqual(list(scope_of_module["x.y"])[0].references, set()) self.assertEqual(scope_of_module.accesses["x.y"], set()) + def test_dotted_import_with_call_access(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + import os.path + os.path.join("A", "B").lower() + """ + ) + scope_of_module = scopes[m] + first_statement = ensure_type(m.body[1], cst.SimpleStatementLine) + attr = ensure_type( + ensure_type( + ensure_type( + ensure_type( + ensure_type(first_statement.body[0], cst.Expr).value, cst.Call + ).func, + cst.Attribute, + ).value, + cst.Call, + ).func, + cst.Attribute, + ).value + self.assertTrue("os.path" in scope_of_module) + self.assertTrue("os" in scope_of_module) + + os_path_join_assignment = cast(Assignment, list(scope_of_module["os.path"])[0]) + os_path_join_assignment_references = list(os_path_join_assignment.references) + self.assertNotEqual(len(os_path_join_assignment_references), 0) + os_path_join_access = os_path_join_assignment_references[0] + self.assertEqual(scope_of_module.accesses["os"], set()) + self.assertEqual(scope_of_module.accesses["os.path"], {os_path_join_access}) + self.assertEqual(scope_of_module.accesses["os.path.join"], set()) + self.assertEqual(os_path_join_access.node, attr) + def test_import_from(self) -> None: m, scopes = get_scope_metadata_provider( """ @@ -1153,3 +1187,34 @@ class ScopeProviderTest(UnitTest): scope.get_qualified_names_for("doesnt_exist") self.assertEqual(len(scope._assignments), assignments_len_before) self.assertEqual(len(scope._accesses), accesses_len_before) + + def test_gen_dotted_names(self) -> None: + names = {name for name, node in _gen_dotted_names(cst.Name(value="a"))} + self.assertEqual(names, {"a"}) + + names = { + name + for name, node in _gen_dotted_names( + cst.Attribute(value=cst.Name(value="a"), attr=cst.Name(value="b")) + ) + } + self.assertEqual(names, {"a.b", "a"}) + + names = { + name + for name, node in _gen_dotted_names( + cst.Attribute( + value=cst.Call( + func=cst.Attribute( + value=cst.Attribute( + value=cst.Name(value="a"), attr=cst.Name(value="b") + ), + attr=cst.Name(value="c"), + ), + args=[], + ), + attr=cst.Name(value="d"), + ) + ) + } + self.assertEqual(names, {"a.b.c", "a.b", "a"}) From 17bde3b3aa0b2cb1b6aaa4910d7a8ee52df3c203 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Tue, 4 Aug 2020 17:33:55 -0700 Subject: [PATCH 046/632] Fix pyre syntax error in tokenizer stub (#360) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary The pyre stub for the tokenizer module had a syntax error. Fixing it removes other pyre errors. ## Test Plan ``` pyre check ``` Co-authored-by: Germán Méndez Bravo --- libcst/_nodes/expression.py | 3 --- libcst/_parser/conversions/expression.py | 3 --- libcst/_parser/detect_config.py | 1 - stubs/tokenize.pyi | 4 ++-- 4 files changed, 2 insertions(+), 9 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index b58ec233..7975a330 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -426,7 +426,6 @@ class Integer(BaseNumber): def _validate(self) -> None: super(Integer, self)._validate() - # pyre-fixme[16]: Module `tokenize` has no attribute `Intnumber`. 
if not re.fullmatch(INTNUMBER_RE, self.value): raise CSTValidationError("Number is not a valid integer.") @@ -465,7 +464,6 @@ class Float(BaseNumber): def _validate(self) -> None: super(Float, self)._validate() - # pyre-fixme[16]: Module `tokenize` has no attribute `Floatnumber`. if not re.fullmatch(FLOATNUMBER_RE, self.value): raise CSTValidationError("Number is not a valid float.") @@ -503,7 +501,6 @@ class Imaginary(BaseNumber): def _validate(self) -> None: super(Imaginary, self)._validate() - # pyre-fixme[16]: Module `tokenize` has no attribute `Imagnumber`. if not re.fullmatch(IMAGNUMBER_RE, self.value): raise CSTValidationError("Number is not a valid imaginary.") diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index e7c76a92..1642d42a 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -862,13 +862,10 @@ def convert_atom_basic( elif child.type.name == "NUMBER": # We must determine what type of number it is since we split node # types up this way. - # pyre-fixme[16]: Module `tokenize` has no attribute `Intnumber`. if re.fullmatch(INTNUMBER_RE, child.string): return WithLeadingWhitespace(Integer(child.string), child.whitespace_before) - # pyre-fixme[16]: Module `tokenize` has no attribute `Floatnumber`. elif re.fullmatch(FLOATNUMBER_RE, child.string): return WithLeadingWhitespace(Float(child.string), child.whitespace_before) - # pyre-fixme[16]: Module `tokenize` has no attribute `Imagnumber`. elif re.fullmatch(IMAGNUMBER_RE, child.string): return WithLeadingWhitespace( Imaginary(child.string), child.whitespace_before diff --git a/libcst/_parser/detect_config.py b/libcst/_parser/detect_config.py index 0748762e..0209d0a2 100644 --- a/libcst/_parser/detect_config.py +++ b/libcst/_parser/detect_config.py @@ -47,7 +47,6 @@ def _detect_encoding(source: Union[str, bytes]) -> str: if isinstance(source, str): return "utf-8" - # pyre-fixme[16]: Module `tokenize` has no attribute `detect_encoding`. return py_tokenize_detect_encoding(BytesIO(source).readline)[0] diff --git a/stubs/tokenize.pyi b/stubs/tokenize.pyi index a6f50eea..284dbf36 100644 --- a/stubs/tokenize.pyi +++ b/stubs/tokenize.pyi @@ -64,7 +64,7 @@ from token import ( VBAR, VBAREQUAL, ) -from typing import Callable, Sequence, Tuple +from typing import Callable, Generator, Sequence, Tuple Hexnumber: str = ... @@ -94,4 +94,4 @@ class TokenInfo(Tuple[int, str, Tuple[int, int], Tuple[int, int], int]): def __repr__(self) -> str: ... def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ... -def tokenize(Callable[[], bytes]) -> TokenInfo: ... +def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... 
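As a small illustration (not part of either patch) of the stdlib behaviour the corrected stub describes: `tokenize.detect_encoding` takes a readline callable and returns an `(encoding, consumed_lines)` pair, which is how `_detect_encoding` above feeds it `BytesIO(source).readline`:

```
from io import BytesIO
from tokenize import detect_encoding

source = b"# -*- coding: latin-1 -*-\nx = 1\n"
encoding, consumed = detect_encoding(BytesIO(source).readline)
print(encoding)       # "iso-8859-1", tokenize's canonical name for latin-1
print(len(consumed))  # lines read while sniffing the coding declaration
```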
From 2e788a25dc5a182f4977a012340ee15f29be586e Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Wed, 5 Aug 2020 12:57:37 -0700 Subject: [PATCH 047/632] [doc] use cst node in ensure_type in best practice doc (#361) --- docs/source/best_practices.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/best_practices.rst b/docs/source/best_practices.rst index 204c2ec9..a4633d5e 100644 --- a/docs/source/best_practices.rst +++ b/docs/source/best_practices.rst @@ -80,7 +80,7 @@ You could instead write something like:: def visit_Call(self, node: cst.Call) -> None: # Store all calls in a stack if m.matches(node.func, m.Name()): - self.call_stack.append(cst.ensure_type(node.func, m.Name).value) + self.call_stack.append(cst.ensure_type(node.func, cst.Name).value) def leave_Call(self, original_node: cst.Call) -> None: # Pop the latest call off the stack From 3e66bdd957ba7f8c19a18c3549c4cd0bcf92bef7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Wed, 5 Aug 2020 16:26:53 -0700 Subject: [PATCH 048/632] Fix enclosing attribute for attributes in call arguments Fixes enclosed arguments like `c.d` in `x.y(c.d()).z()` were badly being resolved as `x.y` instead. This also clarifies the intent in `infer_accesses()` so it no longer shadows variable `name` and also fixes the case where no node is actually found in the scope. --- .../tests/test_remove_unused_imports.py | 10 ++++++++ libcst/metadata/scope_provider.py | 25 +++++++++++++------ 2 files changed, 27 insertions(+), 8 deletions(-) diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index c685f5c8..23b1c727 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -77,6 +77,16 @@ class RemoveUnusedImportsCommandTest(CodemodTest): self.assertCodemod(before, after) + def test_enclosed_attributes(self) -> None: + before = """ + from a.b import c + import x + + def foo() -> None: + x.y(c.d()).z() + """ + self.assertCodemod(before, before) + def test_access_in_assignment(self) -> None: before = """ from a import b diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index fdda406a..3802c3c9 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -639,7 +639,7 @@ class ScopeVisitor(cst.CSTVisitor): self.provider: ScopeProvider = provider self.scope: Scope = GlobalScope() self.__deferred_accesses: List[Tuple[Access, Optional[cst.Attribute]]] = [] - self.__top_level_attribute: Optional[cst.Attribute] = None + self.__top_level_attribute_stack: List[Optional[cst.Attribute]] = [None] @contextmanager def _new_scope( @@ -686,13 +686,19 @@ class ScopeVisitor(cst.CSTVisitor): return self._visit_import_alike(node) def visit_Attribute(self, node: cst.Attribute) -> Optional[bool]: - if self.__top_level_attribute is None: - self.__top_level_attribute = node + if self.__top_level_attribute_stack[-1] is None: + self.__top_level_attribute_stack[-1] = node node.value.visit(self) # explicitly not visiting attr - if self.__top_level_attribute is node: - self.__top_level_attribute = None + if self.__top_level_attribute_stack[-1] is node: + self.__top_level_attribute_stack[-1] = None return False + def visit_Call(self, node: cst.Call) -> Optional[bool]: + self.__top_level_attribute_stack.append(None) + + def leave_Call(self, original_node: cst.Call) -> None: + 
self.__top_level_attribute_stack.pop() + def visit_Name(self, node: cst.Name) -> Optional[bool]: # not all Name have ExpressionContext context = self.provider.get_metadata(ExpressionContextProvider, node, None) @@ -700,7 +706,9 @@ class ScopeVisitor(cst.CSTVisitor): self.scope.record_assignment(node.value, node) elif context in (ExpressionContext.LOAD, ExpressionContext.DEL): access = Access(node, self.scope) - self.__deferred_accesses.append((access, self.__top_level_attribute)) + self.__deferred_accesses.append( + (access, self.__top_level_attribute_stack[-1]) + ) def visit_FunctionDef(self, node: cst.FunctionDef) -> Optional[bool]: self.scope.record_assignment(node.name.value, node) @@ -842,9 +850,10 @@ class ScopeVisitor(cst.CSTVisitor): if enclosing_attribute is not None: # if _gen_dotted_names doesn't generate any values, fall back to # the original name node above - for name, node in _gen_dotted_names(enclosing_attribute): - if name in access.scope: + for attr_name, node in _gen_dotted_names(enclosing_attribute): + if attr_name in access.scope: access.node = node + name = attr_name break scope_name_accesses[(access.scope, name)].add(access) From b4032a32320ab635d89c439287d60f03191db7df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Fri, 7 Aug 2020 11:11:57 -0700 Subject: [PATCH 049/632] Fix pyre error: kwonlydefaults is Optional --- libcst/codemod/_command.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index 4529be00..1a11e91e 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -162,20 +162,16 @@ class MagicArgsCodemodCommand(CodemodCommand, ABC): ) # No default, but we found something in scratch. So, forward it. args.append(self.context.scratch[arg]) + kwonlydefaults = argspec.kwonlydefaults or {} for kwarg in argspec.kwonlyargs: - if ( - kwarg not in self.context.scratch - and kwarg not in argspec.kwonlydefaults - ): + if kwarg not in self.context.scratch and kwarg not in kwonlydefaults: raise KeyError( f"Visitor {transform.__name__} requires keyword arg {kwarg} but " + "it is not in our context nor does it have a default! It should " + "be provided by an argument returned from the 'add_args' method " + "or populated into context.scratch by a previous transform!" 
) - kwargs[kwarg] = self.context.scratch.get( - kwarg, argspec.kwonlydefaults[kwarg] - ) + kwargs[kwarg] = self.context.scratch.get(kwarg, kwonlydefaults[kwarg]) # Return an instance of the transform with those arguments return transform(self.context, *args, **kwargs) From 7fdae6ce6d72cbb6a7fa0b3b106a183cc9a206e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Fri, 7 Aug 2020 11:11:57 -0700 Subject: [PATCH 050/632] Bump to version 0.3.9 --- CHANGELOG.md | 11 +++++++++++ libcst/_version.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 45bbc2a2..2f936ef8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +# 0.3.9 - 2020-09-07 + +## Added + - Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/issues/353) + - Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/issues/350) + - Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/issues/349) + +## Fixed + - Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/issues/360) + - Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/issues/362) + # 0.3.8 - 2020-07-22 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index 87ce5f25..2adb8c1c 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.8" +LIBCST_VERSION: str = "0.3.9" From 771d5e14b792007e576010ae6cd96bf045ebf2c2 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Tue, 11 Aug 2020 17:44:16 -0700 Subject: [PATCH 051/632] Include missing test data in install (#365) --- setup.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c21dc8b2..f92086b9 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,11 @@ setuptools.setup( url="https://github.com/Instagram/LibCST", license="MIT", packages=setuptools.find_packages(), - package_data={"libcst": ["py.typed"]}, + package_data={ + "libcst": ["py.typed"], + "libcst.tests.pyre": ["*"], + "libcst.codemod.tests": ["*"], + }, test_suite="libcst", python_requires=">=3.6", install_requires=[ From b4e04eae63ff6f2605975b3854b40e965cacc7b0 Mon Sep 17 00:00:00 2001 From: Sebastian Kreft Date: Wed, 12 Aug 2020 09:55:11 -0400 Subject: [PATCH 052/632] fix: spaces around walrus operator are not required (#368) Fixes #367 --- libcst/_nodes/expression.py | 13 ------------- libcst/_nodes/tests/test_namedexpr.py | 14 ++++++++++++++ 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 7975a330..679d7b94 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -3680,19 +3680,6 @@ class NamedExpr(BaseExpression): #: Whitespace after the walrus operator, but before the value. 
whitespace_after_walrus: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") - def _validate(self) -> None: - super(NamedExpr, self)._validate() - if ( - self.whitespace_before_walrus.empty - and not self.target._safe_to_use_with_word_operator(ExpressionPosition.LEFT) - ): - raise CSTValidationError("Must have at least one space after target.") - if ( - self.whitespace_after_walrus.empty - and not self.value._safe_to_use_with_word_operator(ExpressionPosition.RIGHT) - ): - raise CSTValidationError("Must have at least one space before value.") - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "NamedExpr": return NamedExpr( lpar=visit_sequence(self, "lpar", self.lpar, visitor), diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index b0eccbf1..4ba1485b 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -87,6 +87,20 @@ class NamedExprTest(CSTNodeTest): "parser": _parse_statement_force_38, "expected_position": None, }, + { + "node": cst.If( + test=cst.NamedExpr( + target=cst.Name(value="x"), + value=cst.Integer(value="1"), + whitespace_before_walrus=cst.SimpleWhitespace(""), + whitespace_after_walrus=cst.SimpleWhitespace(""), + ), + body=cst.SimpleStatementSuite(body=[cst.Pass()]), + ), + "code": "if x:=1: pass\n", + "parser": _parse_statement_force_38, + "expected_position": None, + }, ) ) def test_valid(self, **kwargs: Any) -> None: From 0c09c9dfbbe33ba35e879361add96fee8fdd85c0 Mon Sep 17 00:00:00 2001 From: Shannon Zhu Date: Thu, 13 Aug 2020 08:52:02 -0700 Subject: [PATCH 053/632] Call pyre query with noninteractive logging (#371) Since the stdout/err is being captured from this invocation, using the noninteractive flag will make the output much more readable and avoid special characters for terminal pretty-printing. 
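For illustration (the repo root and file names below are hypothetical), this is the command string the provider ends up building with the new flag; the `params` construction is the unchanged context shown in the hunk below:

```
# Sketch of the query command construction (paths are hypothetical).
from pathlib import Path

root_path = Path("/repo")        # hypothetical repo root
paths = ["a.py", "pkg/b.py"]     # hypothetical files to query

params = ",".join(f"path='{root_path / path}'" for path in paths)
cmd = f'''pyre --noninteractive query "types({params})"'''
print(cmd)
# pyre --noninteractive query "types(path='/repo/a.py',path='/repo/pkg/b.py')"
```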
--- libcst/metadata/type_inference_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index fdbb6611..7cb7da28 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -57,7 +57,7 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): root_path: Path, paths: List[str], timeout: Optional[int] ) -> Mapping[str, object]: params = ",".join(f"path='{root_path / path}'" for path in paths) - cmd = f'''pyre query "types({params})"''' + cmd = f'''pyre --noninteractive query "types({params})"''' try: stdout, stderr, return_code = run_command(cmd, timeout=timeout) except subprocess.TimeoutExpired as exc: From 3ada79ebcb14224e0c0cd4e40a622540656cc0e3 Mon Sep 17 00:00:00 2001 From: Sebastian Kreft Date: Thu, 13 Aug 2020 21:50:36 -0400 Subject: [PATCH 054/632] fix: correctly extract wildcard matchers (#355) * fix: correctly extract wildcard matchers Fixes #337 and #338 * refactor: use data classes instead of bare tuples --- libcst/matchers/_matcher_base.py | 124 ++++++++++++++++---------- libcst/matchers/tests/test_extract.py | 22 +++++ 2 files changed, 100 insertions(+), 46 deletions(-) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 16c16d5b..27475d5b 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -7,7 +7,7 @@ import collections.abc import copy import inspect import re -from dataclasses import fields +from dataclasses import dataclass, fields from enum import Enum, auto from typing import ( Callable, @@ -932,6 +932,16 @@ def SaveMatchedNode(matcher: _OtherNodeT, name: str) -> _OtherNodeT: return cast(_OtherNodeT, _ExtractMatchingNode(matcher, name)) +@dataclass(frozen=True) +class _SequenceMatchesResult: + sequence_capture: Optional[ + Dict[str, Union[libcst.CSTNode, Sequence[libcst.CSTNode]]] + ] + matched_nodes: Optional[ + Union[libcst.CSTNode, MaybeSentinel, Sequence[libcst.CSTNode]] + ] + + def _sequence_matches( # noqa: C901 nodes: Sequence[Union[MaybeSentinel, libcst.CSTNode]], matchers: Sequence[ @@ -944,30 +954,35 @@ def _sequence_matches( # noqa: C901 ] ], metadata_lookup: Callable[[meta.ProviderT, libcst.CSTNode], object], -) -> Optional[Dict[str, Union[libcst.CSTNode, Sequence[libcst.CSTNode]]]]: +) -> _SequenceMatchesResult: if not nodes and not matchers: - # Base case, empty lists are alwatys matches - return {} + # Base case, empty lists are always matches + return _SequenceMatchesResult({}, None) if not nodes and matchers: # Base case, we have one or more matcher that wasn't matched return ( - {} + _SequenceMatchesResult({}, []) if all( (isinstance(m, AtLeastN) and m.n == 0) or isinstance(m, AtMostN) for m in matchers ) - else None + else _SequenceMatchesResult(None, None) ) if nodes and not matchers: # Base case, we have nodes left that don't match any matcher - return None + return _SequenceMatchesResult(None, None) # Recursive case, nodes and matchers LHS matches node = nodes[0] matcher = matchers[0] if isinstance(matcher, DoNotCareSentinel): # We don't care about the value for this node. 
- return _sequence_matches(nodes[1:], matchers[1:], metadata_lookup) + return _SequenceMatchesResult( + _sequence_matches( + nodes[1:], matchers[1:], metadata_lookup + ).sequence_capture, + node, + ) elif isinstance(matcher, _BaseWildcardNode): if isinstance(matcher, AtMostN): if matcher.n > 0: @@ -977,18 +992,24 @@ def _sequence_matches( # noqa: C901 nodes[0], matcher.matcher, metadata_lookup ) if attribute_capture is not None: - sequence_capture = _sequence_matches( + result = _sequence_matches( nodes[1:], [AtMostN(matcher.matcher, n=matcher.n - 1), *matchers[1:]], metadata_lookup, ) - if sequence_capture is not None: - return {**attribute_capture, **sequence_capture} + if result.sequence_capture is not None: + return _SequenceMatchesResult( + {**attribute_capture, **result.sequence_capture}, + (node, *result.matched_nodes), + ) # Finally, assume that this does not match the current node. # Consume the matcher but not the node. - sequence_capture = _sequence_matches(nodes, matchers[1:], metadata_lookup) - if sequence_capture is not None: - return sequence_capture + return _SequenceMatchesResult( + _sequence_matches( + nodes, matchers[1:], metadata_lookup + ).sequence_capture, + (), + ) elif isinstance(matcher, AtLeastN): if matcher.n > 0: # Only match if we can consume one of the matches, since we still @@ -997,13 +1018,17 @@ def _sequence_matches( # noqa: C901 nodes[0], matcher.matcher, metadata_lookup ) if attribute_capture is not None: - sequence_capture = _sequence_matches( + result = _sequence_matches( nodes[1:], [AtLeastN(matcher.matcher, n=matcher.n - 1), *matchers[1:]], metadata_lookup, ) - if sequence_capture is not None: - return {**attribute_capture, **sequence_capture} + if result.sequence_capture is not None: + return _SequenceMatchesResult( + {**attribute_capture, **result.sequence_capture}, + (node, *result.matched_nodes), + ) + return _SequenceMatchesResult(None, None) else: # First, assume that this does match a node (greedy). # Consume one node since it matched this matcher. @@ -1011,45 +1036,52 @@ def _sequence_matches( # noqa: C901 nodes[0], matcher.matcher, metadata_lookup ) if attribute_capture is not None: - sequence_capture = _sequence_matches( - nodes[1:], matchers, metadata_lookup - ) - if sequence_capture is not None: - return {**attribute_capture, **sequence_capture} + result = _sequence_matches(nodes[1:], matchers, metadata_lookup) + if result.sequence_capture is not None: + return _SequenceMatchesResult( + {**attribute_capture, **result.sequence_capture}, + (node, *result.matched_nodes), + ) # Now, assume that this does not match the current node. # Consume the matcher but not the node. - sequence_capture = _sequence_matches( - nodes, matchers[1:], metadata_lookup + return _SequenceMatchesResult( + _sequence_matches( + nodes, matchers[1:], metadata_lookup + ).sequence_capture, + (), ) - if sequence_capture is not None: - return sequence_capture else: # There are no other types of wildcard consumers, but we're making # pyre happy with that fact. raise Exception(f"Logic error unrecognized wildcard {type(matcher)}!") elif isinstance(matcher, _ExtractMatchingNode): # See if the raw matcher matches. If it does, capture the sequence we matched and store it. - sequence_capture = _sequence_matches( + result = _sequence_matches( nodes, [matcher.matcher, *matchers[1:]], metadata_lookup ) - if sequence_capture is not None: - return { - # Our own match capture comes first, since we wnat to allow the same - # name later in the sequence to override us. 
- matcher.name: nodes, - **sequence_capture, - } - return None + if result.sequence_capture is not None: + return _SequenceMatchesResult( + { + # Our own match capture comes first, since we wnat to allow the same + # name later in the sequence to override us. + matcher.name: result.matched_nodes, + **result.sequence_capture, + }, + result.matched_nodes, + ) + return _SequenceMatchesResult(None, None) match_capture = _matches(node, matcher, metadata_lookup) if match_capture is not None: # These values match directly - sequence_capture = _sequence_matches(nodes[1:], matchers[1:], metadata_lookup) - if sequence_capture is not None: - return {**match_capture, **sequence_capture} + result = _sequence_matches(nodes[1:], matchers[1:], metadata_lookup) + if result.sequence_capture is not None: + return _SequenceMatchesResult( + {**match_capture, **result.sequence_capture}, node + ) # Failed recursive case, no match - return None + return _SequenceMatchesResult(None, None) _AttributeValueT = Optional[Union[MaybeSentinel, libcst.CSTNode, str, bool]] @@ -1110,9 +1142,9 @@ def _attribute_matches( # noqa: C901 for m in matcher.options: if isinstance(m, collections.abc.Sequence): # Should match the sequence of requested nodes - sequence_capture = _sequence_matches(node, m, metadata_lookup) - if sequence_capture is not None: - return sequence_capture + result = _sequence_matches(node, m, metadata_lookup) + if result.sequence_capture is not None: + return result.sequence_capture elif isinstance(m, MatchIfTrue): return {} if matcher.func(node) else None elif isinstance(matcher, AllOf): @@ -1121,10 +1153,10 @@ def _attribute_matches( # noqa: C901 for m in matcher.options: if isinstance(m, collections.abc.Sequence): # Should match the sequence of requested nodes - sequence_capture = _sequence_matches(node, m, metadata_lookup) - if sequence_capture is None: + result = _sequence_matches(node, m, metadata_lookup) + if result.sequence_capture is None: return None - all_captures = {**all_captures, **sequence_capture} + all_captures = {**all_captures, **result.sequence_capture} elif isinstance(m, MatchIfTrue): return {} if matcher.func(node) else None else: @@ -1150,7 +1182,7 @@ def _attribute_matches( # noqa: C901 matcher, ), metadata_lookup, - ) + ).sequence_capture # We exhausted our possibilities, there's no match return None diff --git a/libcst/matchers/tests/test_extract.py b/libcst/matchers/tests/test_extract.py index 2bf45b91..5c3cf12a 100644 --- a/libcst/matchers/tests/test_extract.py +++ b/libcst/matchers/tests/test_extract.py @@ -404,3 +404,25 @@ class MatchersExtractTest(UnitTest): ), ) self.assertIsNone(nodes) + + def test_extract_sequence_multiple_wildcards(self) -> None: + expression = cst.parse_expression("1, 2, 3, 4") + nodes = m.extract( + expression, + m.Tuple( + elements=( + m.SaveMatchedNode(m.ZeroOrMore(), "head"), + m.SaveMatchedNode(m.Element(value=m.Integer(value="3")), "element"), + m.SaveMatchedNode(m.ZeroOrMore(), "tail"), + ) + ), + ) + tuple_elements = cst.ensure_type(expression, cst.Tuple).elements + self.assertEqual( + nodes, + { + "head": tuple(tuple_elements[:2]), + "element": tuple_elements[2], + "tail": tuple(tuple_elements[3:]), + }, + ) From e43ef3e20417033bcd27b49b8ba5d2be55882820 Mon Sep 17 00:00:00 2001 From: Sebastian Kreft Date: Fri, 14 Aug 2020 00:19:14 -0400 Subject: [PATCH 055/632] fix: SaveMachedNode now matches with trailing empty wildcards (#356) * fix: SaveMachedNode now matches with trailing empty wildcards Note that SaveMatchedNode was already matching leading 
empty wildcards, however it's value is incorect due to #337. This is why the test for the leading wildcards are failing, and will be so until #355 is merged. This fixes #336. * fix: _matches_zero_nodes type declaration --- libcst/matchers/_matcher_base.py | 31 +++++++++++++++++++++------ libcst/matchers/tests/test_extract.py | 28 ++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 27475d5b..0cf281c0 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -932,6 +932,24 @@ def SaveMatchedNode(matcher: _OtherNodeT, name: str) -> _OtherNodeT: return cast(_OtherNodeT, _ExtractMatchingNode(matcher, name)) +def _matches_zero_nodes( + matcher: Union[ + BaseMatcherNode, + _BaseWildcardNode, + MatchIfTrue[Callable[[object], bool]], + _BaseMetadataMatcher, + DoNotCareSentinel, + ] +) -> bool: + if isinstance(matcher, AtLeastN) and matcher.n == 0: + return True + if isinstance(matcher, AtMostN): + return True + if isinstance(matcher, _ExtractMatchingNode): + return _matches_zero_nodes(matcher.matcher) + return False + + @dataclass(frozen=True) class _SequenceMatchesResult: sequence_capture: Optional[ @@ -960,14 +978,13 @@ def _sequence_matches( # noqa: C901 return _SequenceMatchesResult({}, None) if not nodes and matchers: # Base case, we have one or more matcher that wasn't matched - return ( - _SequenceMatchesResult({}, []) - if all( - (isinstance(m, AtLeastN) and m.n == 0) or isinstance(m, AtMostN) - for m in matchers + if all(_matches_zero_nodes(m) for m in matchers): + return _SequenceMatchesResult( + {m.name: () for m in matchers if isinstance(m, _ExtractMatchingNode)}, + (), ) - else _SequenceMatchesResult(None, None) - ) + else: + return _SequenceMatchesResult(None, None) if nodes and not matchers: # Base case, we have nodes left that don't match any matcher return _SequenceMatchesResult(None, None) diff --git a/libcst/matchers/tests/test_extract.py b/libcst/matchers/tests/test_extract.py index 5c3cf12a..77c134a8 100644 --- a/libcst/matchers/tests/test_extract.py +++ b/libcst/matchers/tests/test_extract.py @@ -322,6 +322,34 @@ class MatchersExtractTest(UnitTest): ) self.assertEqual(nodes, {}) + def test_extract_optional_wildcard_head(self) -> None: + expression = cst.parse_expression("[3]") + nodes = m.extract( + expression, + m.List( + elements=[ + m.SaveMatchedNode(m.ZeroOrMore(), "head1"), + m.SaveMatchedNode(m.ZeroOrMore(), "head2"), + m.Element(value=m.Integer(value="3")), + ] + ), + ) + self.assertEqual(nodes, {"head1": (), "head2": ()}) + + def test_extract_optional_wildcard_tail(self) -> None: + expression = cst.parse_expression("[3]") + nodes = m.extract( + expression, + m.List( + elements=[ + m.Element(value=m.Integer(value="3")), + m.SaveMatchedNode(m.ZeroOrMore(), "tail1"), + m.SaveMatchedNode(m.ZeroOrMore(), "tail2"), + ] + ), + ) + self.assertEqual(nodes, {"tail1": (), "tail2": ()}) + def test_extract_optional_wildcard_present(self) -> None: expression = cst.parse_expression("a + b[c], d(e, f * g, h.i.j)") nodes = m.extract( From 85761b258208faa4cfd8e99c51ca62c59a9727a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Thu, 13 Aug 2020 09:54:03 -0700 Subject: [PATCH 056/632] Add is_annotation subtype for Access inreferences. Access references with this type come from an annotation. 
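A hedged sketch of how a consumer of ScopeProvider might use the new flag (not the provider's own code; the module `t` and name `T` are made up for the example):

```
import libcst as cst
from libcst.metadata import MetadataWrapper, ScopeProvider

code = "from t import T\ndef f(x: T):\n    pass\n"
scopes = MetadataWrapper(cst.parse_module(code)).resolve(ScopeProvider)

# Any scope in this module resolves "T" to the same import Assignment.
scope = next(s for s in scopes.values() if s is not None)
assignment = list(scope["T"])[0]

# True: the only reference to T is the access inside the `x: T` annotation.
print(all(access.is_annotation for access in assignment.references))
```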
(cherry picked from commit 39427eaabce9dff949ccc3545fc4a5d7a2d10930) --- libcst/metadata/scope_provider.py | 14 ++++++++++++-- libcst/metadata/tests/test_scope_provider.py | 19 +++++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 3802c3c9..bcea03c2 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -63,11 +63,14 @@ class Access: #: assignment. scope: "Scope" + is_annotation: bool + __assignments: Set["BaseAssignment"] - def __init__(self, node: cst.Name, scope: "Scope") -> None: + def __init__(self, node: cst.Name, scope: "Scope", is_annotation: bool) -> None: self.node = node self.scope = scope + self.is_annotation = is_annotation self.__assignments = set() def __hash__(self) -> int: @@ -640,6 +643,7 @@ class ScopeVisitor(cst.CSTVisitor): self.scope: Scope = GlobalScope() self.__deferred_accesses: List[Tuple[Access, Optional[cst.Attribute]]] = [] self.__top_level_attribute_stack: List[Optional[cst.Attribute]] = [None] + self.__in_annotation: Set[Union[cst.Call, cst.Annotation]] = set() @contextmanager def _new_scope( @@ -699,13 +703,19 @@ class ScopeVisitor(cst.CSTVisitor): def leave_Call(self, original_node: cst.Call) -> None: self.__top_level_attribute_stack.pop() + def visit_Annotation(self, node: cst.Annotation) -> Optional[bool]: + self.__in_annotation.add(node) + + def leave_Annotation(self, original_node: cst.Annotation) -> None: + self.__in_annotation.discard(original_node) + def visit_Name(self, node: cst.Name) -> Optional[bool]: # not all Name have ExpressionContext context = self.provider.get_metadata(ExpressionContextProvider, node, None) if context == ExpressionContext.STORE: self.scope.record_assignment(node.value, node) elif context in (ExpressionContext.LOAD, ExpressionContext.DEL): - access = Access(node, self.scope) + access = Access(node, self.scope, is_annotation=bool(self.__in_annotation)) self.__deferred_accesses.append( (access, self.__top_level_attribute_stack[-1]) ) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index a4f24591..bee13d50 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1010,6 +1010,25 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(set(scopes.values())), 3) + def test_annotation_access(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + from t import T + def f(t: T): + pass + """ + ) + imp = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.ImportFrom + ) + scope = scopes[imp] + assignments = list(scope["T"]) + assignment = assignments[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) + def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( """ From cee0cc4b786a9ed46da58b0c2ade18f19169350a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 14 Aug 2020 18:43:41 +0100 Subject: [PATCH 057/632] replace matchers with explicit visitation in gatherers (#366) --- libcst/codemod/visitors/_gather_comments.py | 19 +++++-- .../_gather_string_annotation_names.py | 55 ++++++++++++------- .../visitors/_gather_unused_imports.py | 41 ++++++++++---- 3 files changed, 80 insertions(+), 35 deletions(-) diff --git a/libcst/codemod/visitors/_gather_comments.py 
b/libcst/codemod/visitors/_gather_comments.py index d2510027..5adcecf0 100644 --- a/libcst/codemod/visitors/_gather_comments.py +++ b/libcst/codemod/visitors/_gather_comments.py @@ -7,7 +7,6 @@ import re from typing import Dict, Pattern, Union import libcst as cst -import libcst.matchers as m from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor from libcst.metadata import PositionProvider @@ -37,11 +36,21 @@ class GatherCommentsVisitor(ContextAwareVisitor): self._comment_matcher: Pattern[str] = re.compile(comment_regex) - @m.visit(m.EmptyLine(comment=m.DoesNotMatch(None))) - @m.visit(m.TrailingWhitespace(comment=m.DoesNotMatch(None))) - def visit_comment(self, node: Union[cst.EmptyLine, cst.TrailingWhitespace]) -> None: + def visit_EmptyLine(self, node: cst.EmptyLine) -> bool: + if node.comment is not None: + self.handle_comment(node) + return False + + def visit_TrailingWhitespace(self, node: cst.TrailingWhitespace) -> bool: + if node.comment is not None: + self.handle_comment(node) + return False + + def handle_comment( + self, node: Union[cst.EmptyLine, cst.TrailingWhitespace] + ) -> None: comment = node.comment - assert comment is not None # hello, type checker + assert comment is not None # ensured by callsites above if not self._comment_matcher.match(comment.value): return line = self.get_metadata(PositionProvider, comment).start.line diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py index 80b73c76..c3d62445 100644 --- a/libcst/codemod/visitors/_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Set, Union, cast +from typing import Collection, List, Set, Union, cast import libcst as cst import libcst.matchers as m @@ -13,14 +13,6 @@ from libcst.metadata import MetadataWrapper, QualifiedNameProvider FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS = {"typing.TypeVar"} -ANNOTATION_MATCHER: m.BaseMatcherNode = m.Annotation() | m.Call( - metadata=m.MatchMetadataIfTrue( - QualifiedNameProvider, - lambda qualnames: any( - qn.name in FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS for qn in qualnames - ), - ) -) class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): @@ -35,14 +27,45 @@ class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): METADATA_DEPENDENCIES = (QualifiedNameProvider,) - def __init__(self, context: CodemodContext) -> None: + def __init__( + self, + context: CodemodContext, + typing_functions: Collection[str] = FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS, + ) -> None: super().__init__(context) - + self._typing_functions: Collection[str] = typing_functions + self._annotation_stack: List[cst.CSTNode] = [] #: The set of names collected from string literals. 
self.names: Set[str] = set() - @m.call_if_inside(ANNOTATION_MATCHER) - @m.visit(m.ConcatenatedString()) + def visit_Annotation(self, node: cst.Annotation) -> bool: + self._annotation_stack.append(node) + return True + + def leave_Annotation(self, original_node: cst.Annotation) -> None: + self._annotation_stack.pop() + + def visit_Call(self, node: cst.Call) -> bool: + qnames = self.get_metadata(QualifiedNameProvider, node) + if any(qn.name in self._typing_functions for qn in qnames): + self._annotation_stack.append(node) + return True + return False + + def leave_Call(self, original_node: cst.Call) -> None: + if self._annotation_stack and self._annotation_stack[-1] == original_node: + self._annotation_stack.pop() + + def visit_ConcatenatedString(self, node: cst.ConcatenatedString) -> bool: + if self._annotation_stack: + self.handle_any_string(node) + return False + + def visit_SimpleString(self, node: cst.SimpleString) -> bool: + if self._annotation_stack: + self.handle_any_string(node) + return False + def handle_any_string( self, node: Union[cst.SimpleString, cst.ConcatenatedString] ) -> None: @@ -73,9 +96,3 @@ class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): ) } self.names.update(names) - - @m.call_if_inside(ANNOTATION_MATCHER) - @m.call_if_not_inside(m.ConcatenatedString()) - @m.visit(m.SimpleString()) - def handle_simple_string(self, node: cst.SimpleString) -> None: - self.handle_any_string(node) diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index 359eee69..82860325 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -4,20 +4,23 @@ # LICENSE file in the root directory of this source tree. # -from typing import Iterable, Set, Tuple, Union +from typing import Collection, Iterable, Set, Tuple, Union import libcst as cst -import libcst.matchers as m from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor from libcst.codemod.visitors._gather_exports import GatherExportsVisitor from libcst.codemod.visitors._gather_string_annotation_names import ( + FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS, GatherNamesFromStringAnnotationsVisitor, ) from libcst.metadata import ProviderT, ScopeProvider from libcst.metadata.scope_provider import _gen_dotted_names +MODULES_IGNORED_BY_DEFAULT = {"__future__"} + + class GatherUnusedImportsVisitor(ContextAwareVisitor): """ Collects all imports from a module not directly used in the same module. 
@@ -37,9 +40,16 @@ class GatherUnusedImportsVisitor(ContextAwareVisitor): ScopeProvider, ) - def __init__(self, context: CodemodContext) -> None: + def __init__( + self, + context: CodemodContext, + ignored_modules: Collection[str] = MODULES_IGNORED_BY_DEFAULT, + typing_functions: Collection[str] = FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS, + ) -> None: super().__init__(context) + self._ignored_modules: Collection[str] = ignored_modules + self._typing_functions = typing_functions self._string_annotation_names: Set[str] = set() self._exported_names: Set[str] = set() #: Contains a set of (alias, parent_import) pairs that are not used @@ -52,18 +62,27 @@ class GatherUnusedImportsVisitor(ContextAwareVisitor): export_collector = GatherExportsVisitor(self.context) node.visit(export_collector) self._exported_names = export_collector.explicit_exported_objects - annotation_visitor = GatherNamesFromStringAnnotationsVisitor(self.context) + annotation_visitor = GatherNamesFromStringAnnotationsVisitor( + self.context, typing_functions=self._typing_functions + ) node.visit(annotation_visitor) self._string_annotation_names = annotation_visitor.names return True - @m.visit( - m.Import() - | m.ImportFrom( - module=m.DoesNotMatch(m.Name("__future__")), - names=m.DoesNotMatch(m.ImportStar()), - ) - ) + def visit_Import(self, node: cst.Import) -> bool: + self.handle_import(node) + return False + + def visit_ImportFrom(self, node: cst.ImportFrom) -> bool: + module = node.module + if ( + not isinstance(node.names, cst.ImportStar) + and module is not None + and module.value not in self._ignored_modules + ): + self.handle_import(node) + return False + def handle_import(self, node: Union[cst.Import, cst.ImportFrom]) -> None: names = node.names assert not isinstance(names, cst.ImportStar) # hello, type checker From d948a60e84288338103ef85f3ff5b65558e362c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Wed, 12 Aug 2020 09:45:43 -0700 Subject: [PATCH 058/632] Handle more cases for __all__ (cherry picked from commit 95c7a5d5c50c72b835e10e49b7d5a0af327b7278) --- libcst/codemod/visitors/_gather_exports.py | 149 ++++++++++-------- .../visitors/tests/test_gather_exports.py | 30 +++- 2 files changed, 112 insertions(+), 67 deletions(-) diff --git a/libcst/codemod/visitors/_gather_exports.py b/libcst/codemod/visitors/_gather_exports.py index 9b28e25d..967f01e9 100644 --- a/libcst/codemod/visitors/_gather_exports.py +++ b/libcst/codemod/visitors/_gather_exports.py @@ -3,13 +3,12 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -from typing import Set +from typing import Set, Union -import libcst -import libcst.matchers as m +import libcst as cst from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor -from libcst.helpers import ensure_type, get_full_name_for_node +from libcst.helpers import get_full_name_for_node class GatherExportsVisitor(ContextAwareVisitor): @@ -32,7 +31,8 @@ class GatherExportsVisitor(ContextAwareVisitor): def __init__(self, context: CodemodContext) -> None: super().__init__(context) - # Track any re-exported objects in an __all__ reference + # Track any re-exported objects in an __all__ reference and whether + # they're defined or not self.explicit_exported_objects: Set[str] = set() # Presumably at some point in the future it would be useful to grab @@ -43,68 +43,87 @@ class GatherExportsVisitor(ContextAwareVisitor): # that we have a reasonable place to put implicit objects in the future. # Internal bookkeeping - self._in_assignment: int = 0 - self._in_list: int = 0 + self._is_assigned_export: Set[Union[cst.Tuple, cst.List, cst.Set]] = set() + self._in_assigned_export: Set[Union[cst.Tuple, cst.List, cst.Set]] = set() - def visit_AnnAssign(self, node: libcst.AnnAssign) -> bool: - target = get_full_name_for_node(node.target) - if target == "__all__": - self._in_assignment += 1 - return True - return False - - def leave_AnnAssign(self, original_node: libcst.AnnAssign) -> None: - self._in_assignment -= 1 - - def visit_Assign(self, node: libcst.Assign) -> bool: - for target_node in node.targets: - target = get_full_name_for_node(target_node.target) - if target == "__all__": - self._in_assignment += 1 + def visit_AnnAssign(self, node: cst.AnnAssign) -> bool: + value = node.value + if value: + if self._handle_assign_target(node.target, value): return True return False - def leave_Assign(self, original_node: libcst.Assign) -> None: - self._in_assignment -= 1 - - def visit_List(self, node: libcst.List) -> bool: - self._in_list += 1 - # Only visit list/set entries when we're in an __all__ - # assignment. We gate also by internal counters, so this - # is simply an optimization. - return self._in_assignment == 1 and self._in_list == 1 - - def leave_List(self, original_node: libcst.List) -> None: - self._in_list -= 1 - - def visit_Tuple(self, node: libcst.Tuple) -> bool: - self._in_list += 1 - # Only visit list/set entries when we're in an __all__ - # assignment. We gate also by internal counters, so this - # is simply an optimization. - return self._in_assignment == 1 and self._in_list == 1 - - def leave_Tuple(self, original_node: libcst.Tuple) -> None: - self._in_list -= 1 - - def visit_Set(self, node: libcst.Set) -> bool: - # Only visit list/set entries when we're in an __all__ - # assignment. We gate also by internal counters, so this - # is simply an optimization. - self._in_list += 1 - return self._in_assignment == 1 and self._in_list == 1 - - def leave_Set(self, original_node: libcst.Set) -> None: - self._in_list -= 1 - - def visit_Element(self, node: libcst.Element) -> bool: - # See if this is a entry that is a string. 
- extraction = self.extract( - node, m.Element(m.SaveMatchedNode(m.SimpleString(), "string")) - ) - if extraction: - string = ensure_type(extraction["string"], libcst.SimpleString) - self.explicit_exported_objects.add(string.evaluated_value) - - # Don't need to visit children + def visit_Assign(self, node: cst.Assign) -> bool: + for target_node in node.targets: + if self._handle_assign_target(target_node.target, node.value): + return True return False + + def _handle_assign_target( + self, target: cst.BaseExpression, value: cst.BaseExpression + ) -> bool: + target_name = get_full_name_for_node(target) + if target_name == "__all__": + # Assignments such as `__all__ = ["os"]` + # or `__all__ = exports = ["os"]` + if isinstance(value, (cst.List, cst.Tuple, cst.Set)): + self._is_assigned_export.add(value) + return True + elif isinstance(target, cst.Tuple) and isinstance(value, cst.Tuple): + # Assignments such as `__all__, x = ["os"], []` + for element_idx, element_node in enumerate(target.elements): + element_name = get_full_name_for_node(element_node.value) + if element_name == "__all__": + element_value = value.elements[element_idx].value + if isinstance(element_value, (cst.List, cst.Tuple, cst.Set)): + self._is_assigned_export.add(value) + self._is_assigned_export.add(element_value) + return True + return False + + def visit_List(self, node: cst.List) -> bool: + if node in self._is_assigned_export: + self._in_assigned_export.add(node) + return True + return False + + def leave_List(self, original_node: cst.List) -> None: + self._is_assigned_export.discard(original_node) + self._in_assigned_export.discard(original_node) + + def visit_Tuple(self, node: cst.Tuple) -> bool: + if node in self._is_assigned_export: + self._in_assigned_export.add(node) + return True + return False + + def leave_Tuple(self, original_node: cst.Tuple) -> None: + self._is_assigned_export.discard(original_node) + self._in_assigned_export.discard(original_node) + + def visit_Set(self, node: cst.Set) -> bool: + if node in self._is_assigned_export: + self._in_assigned_export.add(node) + return True + return False + + def leave_Set(self, original_node: cst.Set) -> None: + self._is_assigned_export.discard(original_node) + self._in_assigned_export.discard(original_node) + + def visit_SimpleString(self, node: cst.SimpleString) -> bool: + self._handle_string_export(node) + return False + + def visit_ConcatenatedString(self, node: cst.ConcatenatedString) -> bool: + self._handle_string_export(node) + return False + + def _handle_string_export( + self, node: Union[cst.SimpleString, cst.ConcatenatedString] + ) -> None: + if self._in_assigned_export: + name = node.evaluated_value + if name is None: + return + self.explicit_exported_objects.add(name) diff --git a/libcst/codemod/visitors/tests/test_gather_exports.py b/libcst/codemod/visitors/tests/test_gather_exports.py index 410d77d6..916eb5a2 100644 --- a/libcst/codemod/visitors/tests/test_gather_exports.py +++ b/libcst/codemod/visitors/tests/test_gather_exports.py @@ -26,6 +26,10 @@ class TestGatherExportsVisitor(UnitTest): list_of_str = ["foo", "bar", "baz"] + set_of_str = {"foo", "bar", "baz"} + + tuple_of_str = ("foo", "bar", "baz") + another: List[str] = ["foobar", "foobarbaz"] """ @@ -100,12 +104,34 @@ class TestGatherExportsVisitor(UnitTest): gatherer = self.gather_exports(code) self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"}) - def test_gather_exports_ignore_invalid_3(self) -> None: + def test_gather_exports_ignore_valid_1(self) -> None: code = """ from 
foo import bar from biz import baz - __all__ = ["bar", "baz", "foo""bar"] + __all__ = ["bar", "b""a""z"] + """ + + gatherer = self.gather_exports(code) + self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"}) + + def test_gather_exports_ignore_valid_2(self) -> None: + code = """ + from foo import bar + from biz import baz + + __all__, _ = ["bar", "baz"], ["biz"] + """ + + gatherer = self.gather_exports(code) + self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"}) + + def test_gather_exports_ignore_valid_3(self) -> None: + code = """ + from foo import bar + from biz import baz + + __all__ = exported = ["bar", "baz"] """ gatherer = self.gather_exports(code) From fa15c980ef122d6e55b1f0b7d98a9d49dd927ea6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Fri, 14 Aug 2020 12:42:42 -0700 Subject: [PATCH 059/632] Handle string annotations in ScopeProvider --- libcst/metadata/scope_provider.py | 39 ++++++++++++++++- libcst/metadata/tests/test_scope_provider.py | 44 +++++++++++++++++--- 2 files changed, 77 insertions(+), 6 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index bcea03c2..3eb79d35 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -699,9 +699,20 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Call(self, node: cst.Call) -> Optional[bool]: self.__top_level_attribute_stack.append(None) + if any( + qn.name == "typing.TypeVar" + for qn in self.scope.get_qualified_names_for(node) + ): + node.func.visit(self) + self.__in_annotation.add(node) + for arg in node.args[1:]: + arg.visit(self) + return False + return True def leave_Call(self, original_node: cst.Call) -> None: self.__top_level_attribute_stack.pop() + self.__in_annotation.discard(original_node) def visit_Annotation(self, node: cst.Annotation) -> Optional[bool]: self.__in_annotation.add(node) @@ -709,12 +720,38 @@ class ScopeVisitor(cst.CSTVisitor): def leave_Annotation(self, original_node: cst.Annotation) -> None: self.__in_annotation.discard(original_node) + def visit_SimpleString(self, node: cst.SimpleString) -> Optional[bool]: + self._handle_string_annotation(node) + return False + + def visit_ConcatenatedString(self, node: cst.ConcatenatedString) -> Optional[bool]: + self._handle_string_annotation(node) + return False + + def _handle_string_annotation( + self, node: Union[cst.SimpleString, cst.ConcatenatedString] + ) -> None: + if self.__in_annotation: + value = node.evaluated_value + if value: + mod = cst.parse_module(value) + mod.visit(self) + + def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: + if any( + qn.name == "typing.Literal" + for qn in self.scope.get_qualified_names_for(node.value) + ): + node.value.visit(self) + return False + return True + def visit_Name(self, node: cst.Name) -> Optional[bool]: # not all Name have ExpressionContext context = self.provider.get_metadata(ExpressionContextProvider, node, None) if context == ExpressionContext.STORE: self.scope.record_assignment(node.value, node) - elif context in (ExpressionContext.LOAD, ExpressionContext.DEL): + elif context in (ExpressionContext.LOAD, ExpressionContext.DEL, None): access = Access(node, self.scope, is_annotation=bool(self.__in_annotation)) self.__deferred_accesses.append( (access, self.__top_level_attribute_stack[-1]) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index bee13d50..b05ee831 100644 --- 
a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1013,22 +1013,56 @@ class ScopeProviderTest(UnitTest): def test_annotation_access(self) -> None: m, scopes = get_scope_metadata_provider( """ - from t import T - def f(t: T): + from typing import Literal, TypeVar + from a import A, B, C, D, E, F + def x(a: A): pass + def y(b: B): + pass + def z(c: Literal["C"]): + pass + DType = TypeVar("DType", bound=D) + EType = TypeVar("EType", bound="E") + FType = TypeVar("F") """ ) imp = ensure_type( - ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.ImportFrom + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.ImportFrom ) scope = scopes[imp] - assignments = list(scope["T"]) - assignment = assignments[0] + + assignment = list(scope["A"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) self.assertTrue(references[0].is_annotation) + assignment = list(scope["B"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) + + assignment = list(scope["C"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 0) + + assignment = list(scope["D"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) + + assignment = list(scope["E"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) + + assignment = list(scope["F"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 0) + def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( """ From 825931acd67df2302243713eb4dd9f87fa633f4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Fri, 14 Aug 2020 15:25:59 -0700 Subject: [PATCH 060/632] Fix `test_annotation_access` test case. 
(#374) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Germán Méndez Bravo --- libcst/metadata/tests/test_scope_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index b05ee831..a35b444e 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1017,7 +1017,7 @@ class ScopeProviderTest(UnitTest): from a import A, B, C, D, E, F def x(a: A): pass - def y(b: B): + def y(b: "B"): pass def z(c: Literal["C"]): pass From 6f915ab39b5afc73ab2a4864382ece265c045d8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Fri, 14 Aug 2020 15:32:12 -0700 Subject: [PATCH 061/632] Literal could be imported from typing_extensions too (#375) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Germán Méndez Bravo --- libcst/metadata/scope_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 3eb79d35..2df82047 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -739,7 +739,7 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: if any( - qn.name == "typing.Literal" + qn.name in ("typing.Literal", "typing_extensions.Literal") for qn in self.scope.get_qualified_names_for(node.value) ): node.value.visit(self) From 6f9a12c538bd2db9f3d6ff0618711542f1adfef5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Mon, 17 Aug 2020 10:58:14 -0700 Subject: [PATCH 062/632] Bump to version 0.3.10 (#376) (cherry picked from commit aa1f7ced5d55ff059868964f26e0621a7afd5332) --- CHANGELOG.md | 26 +++++++++++++++++++++----- libcst/_version.py | 2 +- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2f936ef8..ee575617 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,13 +1,29 @@ +# 0.3.10 - 2020-09-17 + +## Added +- Handle string annotations in ScopeProvider [#373](https://github.com/Instagram/LibCST/pull/373) +- Add is_annotation subtype for Access inreferences. 
[#372](https://github.com/Instagram/LibCST/pull/372) + +## Updated +- Call pyre query with noninteractive logging [#371](https://github.com/Instagram/LibCST/pull/371) +- Replace matchers with explicit visitation in gatherers [#366](https://github.com/Instagram/LibCST/pull/366) +- Include missing test data in install [#365](https://github.com/Instagram/LibCST/pull/365) + +## Fixed +- Spaces around walrus operator are not required [#368](https://github.com/Instagram/LibCST/pull/368) +- SaveMachedNode now matches with trailing empty wildcards [#356](https://github.com/Instagram/LibCST/pull/356) +- Correctly extract wildcard matchers [#355](https://github.com/Instagram/LibCST/pull/355) + # 0.3.9 - 2020-09-07 ## Added - - Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/issues/353) - - Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/issues/350) - - Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/issues/349) + - Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/pull/353) + - Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/pull/350) + - Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/pull/349) ## Fixed - - Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/issues/360) - - Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/issues/362) + - Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/pull/360) + - Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/pull/362) # 0.3.8 - 2020-07-22 diff --git a/libcst/_version.py b/libcst/_version.py index 2adb8c1c..343c1bb2 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. 
-LIBCST_VERSION: str = "0.3.9" +LIBCST_VERSION: str = "0.3.10" From dc6e7ba6f733bc2f7d311bf17edaee67aa27d7f9 Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Mon, 17 Aug 2020 15:09:13 -0700 Subject: [PATCH 063/632] [metadata] update return type of ParentNodeProvider to be CSTNode (#377) --- libcst/metadata/parent_node_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/metadata/parent_node_provider.py b/libcst/metadata/parent_node_provider.py index fa36e94d..1e569d51 100644 --- a/libcst/metadata/parent_node_provider.py +++ b/libcst/metadata/parent_node_provider.py @@ -21,6 +21,6 @@ class ParentNodeVisitor(cst.CSTVisitor): super().on_leave(original_node) -class ParentNodeProvider(BatchableMetadataProvider[Optional[cst.CSTNode]]): +class ParentNodeProvider(BatchableMetadataProvider[cst.CSTNode]): def visit_Module(self, node: cst.Module) -> Optional[bool]: node.visit(ParentNodeVisitor(self)) From 748ae7190996df6bfc355f20d77d75d79252c5a5 Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Thu, 20 Aug 2020 17:15:21 -0700 Subject: [PATCH 064/632] [docs] add source code links to each class/function (#378) --- docs/source/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index 07c7839d..3eaff6a0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -50,6 +50,7 @@ extensions = [ "sphinx.ext.autosummary", "sphinx.ext.graphviz", "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", "sphinx_rtd_theme", ] From 7ca738bf3964ac48ebb36b1b362153c87c29a706 Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Mon, 31 Aug 2020 10:44:55 -0700 Subject: [PATCH 065/632] Upgrade dev tools (Black/Flake8/isort) and read install requirements from requirements.txt (#380) * Read install requirements from requirements.txt * read extras_require from requirements-dev.txt * add requirements-dev.txt to MANIFEST.in * apply fixes for new version of Black and Flake8 * don't upgrade Pyre * re-format --- MANIFEST.in | 2 +- libcst/_exceptions.py | 2 +- libcst/_nodes/base.py | 4 +- libcst/_nodes/expression.py | 4 +- libcst/_nodes/tests/test_atom.py | 24 ++- libcst/_nodes/tests/test_docstring.py | 5 +- libcst/_nodes/tests/test_funcdef.py | 31 +++- libcst/_parser/conversions/expression.py | 8 +- libcst/_parser/grammar.py | 9 +- libcst/_parser/parso/pgen2/grammar_parser.py | 12 +- libcst/_typed_visitor.py | 124 ++++++------- libcst/codegen/gen_matcher_classes.py | 2 +- libcst/codegen/gen_visitor_functions.py | 2 +- libcst/codegen/generate.py | 4 +- libcst/codemod/_cli.py | 15 +- .../commands/convert_format_to_fstring.py | 13 +- .../convert_percent_format_to_fstring.py | 3 +- libcst/codemod/commands/rename.py | 15 +- .../commands/strip_strings_from_types.py | 4 +- .../tests/test_convert_format_to_fstring.py | 13 +- libcst/codemod/commands/tests/test_rename.py | 115 +++++++++--- .../commands/unnecessary_format_string.py | 4 +- libcst/codemod/tests/test_codemod_cli.py | 3 +- .../visitors/_apply_type_annotations.py | 5 +- libcst/helpers/_template.py | 35 +++- libcst/helpers/tests/test_expression.py | 4 +- libcst/helpers/tests/test_statement.py | 5 +- libcst/helpers/tests/test_template.py | 168 +++++++++++++----- libcst/matchers/_matcher_base.py | 8 +- libcst/matchers/_visitors.py | 4 +- .../tests/test_matchers_with_metadata.py | 33 ++-- .../metadata/expression_context_provider.py | 2 +- libcst/metadata/scope_provider.py | 2 +- .../metadata/tests/test_reentrant_codegen.py | 4 +- libcst/metadata/tests/test_scope_provider.py | 7 +- libcst/metadata/wrapper.py | 2 +- 
libcst/tests/test_pyre_integration.py | 7 +- libcst/tests/test_type_enforce.py | 7 +- pyproject.toml | 1 - requirements-dev.txt | 24 +-- requirements.txt | 8 +- setup.py | 22 +-- tox.ini | 4 +- 43 files changed, 503 insertions(+), 267 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index c95924a3..4402255d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1 @@ -include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md requirements.txt docs/source/*.rst libcst/py.typed +include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md requirements.txt requirements-dev.txt docs/source/*.rst libcst/py.typed diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index 354f6715..fe41f0ee 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -161,7 +161,7 @@ class ParserSyntaxError(Exception): def __repr__(self) -> str: return ( - f"ParserSyntaxError(" + "ParserSyntaxError(" + f"{self.message!r}, lines=[...], raw_line={self.raw_line!r}, " + f"raw_column={self.raw_column!r})" ) diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 14c0bf39..fe2988c9 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -91,7 +91,7 @@ def _pretty_repr_sequence(seq: Sequence[object]) -> str: def _indent(value: str) -> str: - return "\n".join(f" {l}" for l in value.split("\n")) + return "\n".join(f" {line}" for line in value.split("\n")) def _clone(val: object) -> object: @@ -236,7 +236,7 @@ class CSTNode(ABC): # validate return type of the user-defined `visitor.on_leave` method if not isinstance(leave_result, (CSTNode, RemovalSentinel)): raise Exception( - f"Expected a node of type CSTNode or a RemovalSentinel, " + "Expected a node of type CSTNode or a RemovalSentinel, " + f"but got a return value of {type(leave_result).__name__}" ) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 679d7b94..25b372d1 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -758,8 +758,8 @@ class FormattedStringExpression(BaseFormattedStringContent): format_spec: Optional[Sequence[BaseFormattedStringContent]] = None #: Whitespace after the opening curly brace (``{``), but before the ``expression``. 
- whitespace_before_expression: BaseParenthesizableWhitespace = SimpleWhitespace.field( - "" + whitespace_before_expression: BaseParenthesizableWhitespace = ( + SimpleWhitespace.field("") ) #: Whitespace after the ``expression``, but before the ``conversion``, diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index a35790eb..452c9f70 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -591,11 +591,15 @@ class AtomTest(CSTNodeTest): parts=( cst.FormattedStringExpression( cst.Comparison( - left=cst.Name(value="a",), + left=cst.Name( + value="a", + ), comparisons=[ cst.ComparisonTarget( operator=cst.Equal(), - comparator=cst.Name(value="b",), + comparator=cst.Name( + value="b", + ), ), ], ), @@ -611,11 +615,15 @@ class AtomTest(CSTNodeTest): parts=( cst.FormattedStringExpression( cst.Comparison( - left=cst.Name(value="a",), + left=cst.Name( + value="a", + ), comparisons=[ cst.ComparisonTarget( operator=cst.NotEqual(), - comparator=cst.Name(value="b",), + comparator=cst.Name( + value="b", + ), ), ], ), @@ -631,8 +639,12 @@ class AtomTest(CSTNodeTest): parts=( cst.FormattedStringExpression( cst.NamedExpr( - target=cst.Name(value="a",), - value=cst.Integer(value="5",), + target=cst.Name( + value="a", + ), + value=cst.Integer( + value="5", + ), lpar=(cst.LeftParen(),), rpar=(cst.RightParen(),), ), diff --git a/libcst/_nodes/tests/test_docstring.py b/libcst/_nodes/tests/test_docstring.py index 0f46f2f6..6e05d0fa 100644 --- a/libcst/_nodes/tests/test_docstring.py +++ b/libcst/_nodes/tests/test_docstring.py @@ -25,7 +25,10 @@ class DocstringTest(UnitTest): ''', "docstring in triple quotes.", ), - ('''"docstring in single quotes."''', "docstring in single quotes.",), + ( + '''"docstring in single quotes."''', + "docstring in single quotes.", + ), ( ''' # comment diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 9fef83a6..a5d0d080 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -667,15 +667,31 @@ class FunctionDefCreationTest(CSTNodeTest): }, { "node": cst.FunctionDef( - name=cst.Name(value="foo",), + name=cst.Name( + value="foo", + ), params=cst.Parameters( - params=[cst.Param(name=cst.Name(value="param1",),),], + params=[ + cst.Param( + name=cst.Name( + value="param1", + ), + ), + ], ), body=cst.IndentedBlock( - body=[cst.SimpleStatementLine(body=[cst.Pass(),],),], + body=[ + cst.SimpleStatementLine( + body=[ + cst.Pass(), + ], + ), + ], ), whitespace_before_params=cst.ParenthesizedWhitespace( - last_line=cst.SimpleWhitespace(value=" ",), + last_line=cst.SimpleWhitespace( + value=" ", + ), ), ), "code": "def foo(\n param1):\n pass\n", @@ -1912,7 +1928,12 @@ class FunctionDefParserTest(CSTNodeTest): posonly_ind=cst.ParamSlash( comma=cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), ), - params=(cst.Param(cst.Name("baz"), star="",),), + params=( + cst.Param( + cst.Name("baz"), + star="", + ), + ), ), cst.SimpleStatementSuite((cst.Pass(),)), ), diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 1642d42a..8edbf262 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -1113,7 +1113,9 @@ def convert_fstring_format_spec( version=">=3.5,<3.8", ) @with_production( - "testlist_comp_tuple", "(test) ( comp_for | (',' (test))* [','] )", version="<3.5", + "testlist_comp_tuple", + "(test) ( comp_for | (',' (test))* [','] )", + version="<3.5", ) def 
convert_testlist_comp_tuple( config: ParserConfig, children: typing.Sequence[typing.Any] @@ -1138,7 +1140,9 @@ def convert_testlist_comp_tuple( version=">=3.5,<3.8", ) @with_production( - "testlist_comp_list", "(test) ( comp_for | (',' (test))* [','] )", version="<3.5", + "testlist_comp_list", + "(test) ( comp_for | (',' (test))* [','] )", + version="<3.5", ) def convert_testlist_comp_list( config: ParserConfig, children: typing.Sequence[typing.Any] diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index 631714e3..c8c24025 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -288,7 +288,8 @@ def get_grammar_str(version: PythonVersionInfo, future_imports: FrozenSet[str]) # impossible. @lru_cache() def get_grammar( - version: PythonVersionInfo, future_imports: Union[FrozenSet[str], AutoConfig], + version: PythonVersionInfo, + future_imports: Union[FrozenSet[str], AutoConfig], ) -> "Grammar[TokenType]": if isinstance(future_imports, AutoConfig): # For easier testing, if not provided assume no __future__ imports @@ -366,7 +367,8 @@ def _should_include( def _should_include_future( - future: Optional[str], future_imports: FrozenSet[str], + future: Optional[str], + future_imports: FrozenSet[str], ) -> bool: if future is None: return True @@ -389,7 +391,8 @@ def get_nonterminal_productions( @lru_cache() def get_nonterminal_conversions( - version: PythonVersionInfo, future_imports: FrozenSet[str], + version: PythonVersionInfo, + future_imports: FrozenSet[str], ) -> Mapping[str, NonterminalConversion]: """ Returns a mapping from nonterminal production name to the conversion function that diff --git a/libcst/_parser/parso/pgen2/grammar_parser.py b/libcst/_parser/parso/pgen2/grammar_parser.py index 0d30199d..5d0f2229 100644 --- a/libcst/_parser/parso/pgen2/grammar_parser.py +++ b/libcst/_parser/parso/pgen2/grammar_parser.py @@ -93,10 +93,14 @@ class GrammarParser: def _parse_items(self): # items: item+ a, b = self._parse_item() - while self.type in ( - PythonTokenTypes.NAME, - PythonTokenTypes.STRING, - ) or self.value in ("(", "["): + while ( + self.type + in ( + PythonTokenTypes.NAME, + PythonTokenTypes.STRING, + ) + or self.value in ("(", "[") + ): c, d = self._parse_item() # Need to end on the next item. 
b.add_arc(c) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 06a18a65..bbc10d55 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -13,6 +13,68 @@ from libcst._typed_visitor_base import mark_no_op if TYPE_CHECKING: + from libcst._nodes.expression import ( # noqa: F401 + Annotation, + Arg, + Asynchronous, + Attribute, + Await, + BaseDictElement, + BaseElement, + BaseExpression, + BaseFormattedStringContent, + BaseSlice, + BinaryOperation, + BooleanOperation, + Call, + Comparison, + ComparisonTarget, + CompFor, + CompIf, + ConcatenatedString, + Dict, + DictComp, + DictElement, + Element, + Ellipsis, + Float, + FormattedString, + FormattedStringExpression, + FormattedStringText, + From, + GeneratorExp, + IfExp, + Imaginary, + Index, + Integer, + Lambda, + LeftCurlyBrace, + LeftParen, + LeftSquareBracket, + List, + ListComp, + Name, + NamedExpr, + Param, + Parameters, + ParamSlash, + ParamStar, + RightCurlyBrace, + RightParen, + RightSquareBracket, + Set, + SetComp, + SimpleString, + Slice, + StarredDictElement, + StarredElement, + Subscript, + SubscriptElement, + Tuple, + UnaryOperation, + Yield, + ) + from libcst._nodes.module import Module # noqa: F401 from libcst._nodes.op import ( # noqa: F401 Add, AddAssign, @@ -107,67 +169,6 @@ if TYPE_CHECKING: With, WithItem, ) - from libcst._nodes.expression import ( # noqa: F401 - Annotation, - Arg, - Asynchronous, - Attribute, - Await, - BaseDictElement, - BaseElement, - BaseExpression, - BaseFormattedStringContent, - BaseSlice, - BinaryOperation, - BooleanOperation, - Call, - CompFor, - CompIf, - Comparison, - ComparisonTarget, - ConcatenatedString, - Dict, - DictComp, - DictElement, - Element, - Ellipsis, - Float, - FormattedString, - FormattedStringExpression, - FormattedStringText, - From, - GeneratorExp, - IfExp, - Imaginary, - Index, - Integer, - Lambda, - LeftCurlyBrace, - LeftParen, - LeftSquareBracket, - List, - ListComp, - Name, - NamedExpr, - Param, - ParamSlash, - ParamStar, - Parameters, - RightCurlyBrace, - RightParen, - RightSquareBracket, - Set, - SetComp, - SimpleString, - Slice, - StarredDictElement, - StarredElement, - Subscript, - SubscriptElement, - Tuple, - UnaryOperation, - Yield, - ) from libcst._nodes.whitespace import ( # noqa: F401 BaseParenthesizableWhitespace, Comment, @@ -177,7 +178,6 @@ if TYPE_CHECKING: SimpleWhitespace, TrailingWhitespace, ) - from libcst._nodes.module import Module # noqa: F401 class CSTTypedBaseFunctions: diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index cfde98a3..5c6a550d 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -553,7 +553,7 @@ for node in all_libcst_nodes: # Add special metadata field generated_code.append( - f" metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()" + " metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()" ) diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index 8eec1bc0..d9a9401b 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -29,7 +29,7 @@ generated_code.append("from libcst._typed_visitor_base import mark_no_op") # cause an import cycle otherwise. 
generated_code.append("") generated_code.append("") -generated_code.append(f"if TYPE_CHECKING:") +generated_code.append("if TYPE_CHECKING:") for module, objects in imports.items(): generated_code.append(f" from {module} import ( # noqa: F401") generated_code.append(f" {', '.join(sorted(list(objects)))}") diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index c6bf67b3..60a952f2 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -26,9 +26,7 @@ from libcst.codegen.transforms import ( def format_file(fname: str) -> None: with open(os.devnull, "w") as devnull: - subprocess.check_call( - ["isort", "-y", "-q", fname], stdout=devnull, stderr=devnull - ) + subprocess.check_call(["isort", "-q", fname], stdout=devnull, stderr=devnull) subprocess.check_call(["black", fname], stdout=devnull, stderr=devnull) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index b9ffd280..73e443ec 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -162,9 +162,11 @@ def exec_transform_with_prettyprint( return code result = transform_module(transform, code, python_version=python_version) - code: Optional[str] = None if isinstance( - result, (TransformFailure, TransformExit, TransformSkip) - ) else result.code + code: Optional[str] = ( + None + if isinstance(result, (TransformFailure, TransformExit, TransformSkip)) + else result.code + ) if code is not None and format_code: try: @@ -603,11 +605,14 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 # like type inference to individual forked processes. print("Calculating full-repo metadata...", file=sys.stderr) metadata_manager = FullRepoManager( - repo_root, files, transform.get_inherited_dependencies(), + repo_root, + files, + transform.get_inherited_dependencies(), ) metadata_manager.resolve_cache() transform.context = replace( - transform.context, metadata_manager=metadata_manager, + transform.context, + metadata_manager=metadata_manager, ) print("Executing codemod...", file=sys.stderr) diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index 92431698..39b2f96c 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -364,20 +364,21 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): if self.findall(expr, m.Comment()) and not self.allow_strip_comments: # We could strip comments, but this is a formatting change so # we choose not to for now. - self.warn(f"Unsupported comment in format() call") + self.warn("Unsupported comment in format() call") return None if self.findall(expr, m.FormattedString()): - self.warn(f"Unsupported f-string in format() call") + self.warn("Unsupported f-string in format() call") return None if self.findall(expr, m.Await()) and not self.allow_await: # This is fixed in 3.7 but we don't currently have a flag # to enable/disable it. - self.warn(f"Unsupported await in format() call") + self.warn("Unsupported await in format() call") return None # Stripping newlines is effectively a format-only change. expr = cst.ensure_type( - expr.visit(StripNewlinesTransformer(self.context)), cst.BaseExpression, + expr.visit(StripNewlinesTransformer(self.context)), + cst.BaseExpression, ) # Try our best to swap quotes on any strings that won't fit @@ -392,7 +393,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): # in it. 
raw_expr_string = self.module.code_for_node(expr) if "\\" in raw_expr_string: - self.warn(f"Unsupported backslash in format expression") + self.warn("Unsupported backslash in format expression") return None # For safety sake, if this is a dict/set or dict/set comprehension, @@ -408,7 +409,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): expr.visit(quote_gatherer) for stringend in quote_gatherer.stringends: if stringend in containing_string.quote: - self.warn(f"Cannot embed string with same quote from format() call") + self.warn("Cannot embed string with same quote from format() call") return None return cst.FormattedStringExpression(expression=expr, conversion=conversion) diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index f56b055e..35032719 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -77,7 +77,8 @@ class ConvertPercentFormatStringCommand(VisitorBasedCodemodCommand): left=m.MatchIfTrue(_match_simple_string), operator=m.Modulo(), right=m.SaveMatchedNode( - m.MatchIfTrue(_gen_match_simple_expression(self.module)), expr_key, + m.MatchIfTrue(_gen_match_simple_expression(self.module)), + expr_key, ), ), ) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index ce50cc12..03d5ddef 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -259,8 +259,13 @@ class RenameCommand(VisitorBasedCodemodCommand): inside_import_statement: bool = not self.get_metadata( QualifiedNameProvider, original_node, set() ) - if QualifiedNameProvider.has_name(self, original_node, self.old_name,) or ( - inside_import_statement and full_replacement_name == self.new_name + if ( + QualifiedNameProvider.has_name( + self, + original_node, + self.old_name, + ) + or (inside_import_statement and full_replacement_name == self.new_name) ): new_value, new_attr = self.new_module, self.new_mod_or_obj if not inside_import_statement: @@ -286,9 +291,9 @@ class RenameCommand(VisitorBasedCodemodCommand): # that we have any `self.scheduled_removals` tells us we encountered a matching `old_name` in the code. if not self.bypass_import and self.scheduled_removals: if self.new_module: - new_obj: Optional[str] = self.new_mod_or_obj.split(".")[ - 0 - ] if self.new_mod_or_obj else None + new_obj: Optional[str] = ( + self.new_mod_or_obj.split(".")[0] if self.new_mod_or_obj else None + ) AddImportsVisitor.add_needed_import( self.context, module=self.new_module, obj=new_obj ) diff --git a/libcst/codemod/commands/strip_strings_from_types.py b/libcst/codemod/commands/strip_strings_from_types.py index eee89fe4..b219a7db 100644 --- a/libcst/codemod/commands/strip_strings_from_types.py +++ b/libcst/codemod/commands/strip_strings_from_types.py @@ -15,7 +15,9 @@ from libcst.metadata import QualifiedNameProvider class StripStringsCommand(VisitorBasedCodemodCommand): - DESCRIPTION: str = "Converts string type annotations to 3.7-compatible forward references." + DESCRIPTION: str = ( + "Converts string type annotations to 3.7-compatible forward references." 
+ ) METADATA_DEPENDENCIES = (QualifiedNameProvider,) diff --git a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py index ee1d3cb0..35a7aece 100644 --- a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py @@ -150,7 +150,10 @@ class ConvertFormatStringCommandTest(CodemodTest): """ self.assertCodemod( - before, after, allow_strip_comments=True, python_version="3.7", + before, + after, + allow_strip_comments=True, + python_version="3.7", ) def test_enable_unsupported_await(self) -> None: @@ -168,7 +171,10 @@ class ConvertFormatStringCommandTest(CodemodTest): """ self.assertCodemod( - before, after, allow_await=True, python_version="3.7", + before, + after, + allow_await=True, + python_version="3.7", ) def test_formatspec_conversion(self) -> None: @@ -203,7 +209,8 @@ class ConvertFormatStringCommandTest(CodemodTest): return f"{x:0>3d}" """ self.assertCodemod( - before, after, + before, + after, ) def test_position_replacement(self) -> None: diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index b5427280..aa95801e 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -46,7 +46,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="foo.bar", new_name="baz.qux", + before, + after, + old_name="foo.bar", + new_name="baz.qux", ) def test_rename_repeated_name_with_asname(self) -> None: @@ -63,7 +66,10 @@ class TestRenameCommand(CodemodTest): qux.bla(5) """ self.assertCodemod( - before, after, old_name="foo.foo", new_name="baz.qux", + before, + after, + old_name="foo.foo", + new_name="baz.qux", ) def test_rename_attr(self) -> None: @@ -82,7 +88,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="a.b.c", new_name="d.e.f", + before, + after, + old_name="a.b.c", + new_name="d.e.f", ) def test_rename_attr_asname(self) -> None: @@ -101,7 +110,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="foo.qux", new_name="baz.quux", + before, + after, + old_name="foo.qux", + new_name="baz.quux", ) def test_rename_module_import(self) -> None: @@ -119,7 +131,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="a.b", new_name="c.b", + before, + after, + old_name="a.b", + new_name="c.b", ) def test_rename_module_import_2(self) -> None: @@ -137,7 +152,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="a", new_name="c", + before, + after, + old_name="a", + new_name="c", ) def test_rename_module_import_no_change(self) -> None: @@ -149,7 +167,10 @@ class TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, before, old_name="b", new_name="c.b", + before, + before, + old_name="b", + new_name="c.b", ) def test_rename_module_import_from(self) -> None: @@ -167,7 +188,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="a.b", new_name="c.b", + before, + after, + old_name="a.b", + new_name="c.b", ) def test_rename_module_import_from_2(self) -> None: @@ -185,7 +209,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="a", new_name="c", + before, + after, + old_name="a", + new_name="c", ) def test_rename_class(self) -> None: @@ -202,7 +229,10 @@ class 
TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, after, old_name="a.b.some_class", new_name="c.b.some_class", + before, + after, + old_name="a.b.some_class", + new_name="c.b.some_class", ) def test_rename_importfrom_same_module(self) -> None: @@ -219,7 +249,10 @@ class TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, after, old_name="a.b.Class_1", new_name="a.b.Class_3", + before, + after, + old_name="a.b.Class_1", + new_name="a.b.Class_3", ) def test_rename_importfrom_same_module_2(self) -> None: @@ -258,7 +291,10 @@ class TestRenameCommand(CodemodTest): """ self.assertCodemod( - before, after, old_name="x", new_name="z", + before, + after, + old_name="x", + new_name="z", ) def test_module_does_not_change(self) -> None: @@ -292,7 +328,10 @@ class TestRenameCommand(CodemodTest): baz: c.baz """ self.assertCodemod( - before, after, old_name="a.z", new_name="d.z", + before, + after, + old_name="a.z", + new_name="d.z", ) def test_other_import_froms_untouched(self) -> None: @@ -312,7 +351,10 @@ class TestRenameCommand(CodemodTest): baz: d.baz """ self.assertCodemod( - before, after, old_name="a.b", new_name="f.b", + before, + after, + old_name="a.b", + new_name="f.b", ) def test_no_removal_of_import_in_use(self) -> None: @@ -333,7 +375,10 @@ class TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, after, old_name="a.b", new_name="z.b", + before, + after, + old_name="a.b", + new_name="z.b", ) def test_no_removal_of_dotted_import_in_use(self) -> None: @@ -354,7 +399,10 @@ class TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, after, old_name="a.b.c", new_name="z.b.c", + before, + after, + old_name="a.b.c", + new_name="z.b.c", ) def test_no_removal_of_import_from_in_use(self) -> None: @@ -372,7 +420,10 @@ class TestRenameCommand(CodemodTest): bar: b.some_other_class """ self.assertCodemod( - before, after, old_name="a.b.some_class", new_name="blah.some_class", + before, + after, + old_name="a.b.some_class", + new_name="blah.some_class", ) def test_other_unused_imports_untouched(self) -> None: @@ -391,7 +442,10 @@ class TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, after, old_name="a.obj", new_name="c.obj", + before, + after, + old_name="a.obj", + new_name="c.obj", ) def test_complex_module_rename(self) -> None: @@ -438,7 +492,10 @@ class TestRenameCommand(CodemodTest): pass """ self.assertCodemod( - before, after, old_name="aa.aaaa", new_name="b.c", + before, + after, + old_name="aa.aaaa", + new_name="b.c", ) def test_repeated_name(self) -> None: @@ -455,7 +512,10 @@ class TestRenameCommand(CodemodTest): qux(5) """ self.assertCodemod( - before, after, old_name="foo.foo", new_name="qux.qux", + before, + after, + old_name="foo.foo", + new_name="qux.qux", ) def test_no_codemod(self) -> None: @@ -466,7 +526,10 @@ class TestRenameCommand(CodemodTest): bar(5) """ self.assertCodemod( - before, before, old_name="bar", new_name="qux", + before, + before, + old_name="bar", + new_name="qux", ) def test_rename_import_prefix(self) -> None: @@ -477,7 +540,10 @@ class TestRenameCommand(CodemodTest): import x.y.c.d """ self.assertCodemod( - before, after, old_name="a.b", new_name="x.y", + before, + after, + old_name="a.b", + new_name="x.y", ) def test_rename_import_from_prefix(self) -> None: @@ -488,7 +554,10 @@ class TestRenameCommand(CodemodTest): from x.y.c.d import foo """ self.assertCodemod( - before, after, old_name="a.b", new_name="x.y", + before, + after, + old_name="a.b", + new_name="x.y", ) def 
test_rename_multiple_occurrences(self) -> None: diff --git a/libcst/codemod/commands/unnecessary_format_string.py b/libcst/codemod/commands/unnecessary_format_string.py index a363b43f..a539d213 100644 --- a/libcst/codemod/commands/unnecessary_format_string.py +++ b/libcst/codemod/commands/unnecessary_format_string.py @@ -10,7 +10,9 @@ from libcst.codemod import VisitorBasedCodemodCommand class UnnecessaryFormatString(VisitorBasedCodemodCommand): - DESCRIPTION: str = "Converts f-strings which perform no formatting to regular strings." + DESCRIPTION: str = ( + "Converts f-strings which perform no formatting to regular strings." + ) @m.leave(m.FormattedString(parts=(m.FormattedStringText(),))) def _check_formatted_string( diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 6e6e55f1..74f1c174 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -28,7 +28,8 @@ class TestCodemodCLI(UnitTest): version = sys.version_info if version[0] == 3 and version[1] == 6: self.assertIn( - "ParserSyntaxError: Syntax Error @ 14:11.", rlt.stderr.decode("utf-8"), + "ParserSyntaxError: Syntax Error @ 14:11.", + rlt.stderr.decode("utf-8"), ) else: self.assertIn( diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index ba3cd700..77beac8b 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -330,8 +330,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): name = get_full_name_for_node(only_target) if name is not None: self.qualifier.append(name) - if self._qualifier_name() in self.annotations.attribute_annotations and not isinstance( - only_target, cst.Subscript + if ( + self._qualifier_name() in self.annotations.attribute_annotations + and not isinstance(only_target, cst.Subscript) ): annotation = self.annotations.attribute_annotations[ self._qualifier_name() diff --git a/libcst/helpers/_template.py b/libcst/helpers/_template.py index e3437c1d..b1be6e5c 100644 --- a/libcst/helpers/_template.py +++ b/libcst/helpers/_template.py @@ -157,7 +157,9 @@ class TemplateTransformer(cst.CSTTransformer): return self.simple_replacements[var_name].deep_clone() def leave_Annotation( - self, original_node: cst.Annotation, updated_node: cst.Annotation, + self, + original_node: cst.Annotation, + updated_node: cst.Annotation, ) -> cst.Annotation: # We can't use matchers here due to circular imports annotation = updated_node.annotation @@ -168,7 +170,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_AssignTarget( - self, original_node: cst.AssignTarget, updated_node: cst.AssignTarget, + self, + original_node: cst.AssignTarget, + updated_node: cst.AssignTarget, ) -> cst.AssignTarget: # We can't use matchers here due to circular imports target = updated_node.target @@ -179,7 +183,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_Param( - self, original_node: cst.Param, updated_node: cst.Param, + self, + original_node: cst.Param, + updated_node: cst.Param, ) -> cst.Param: var_name = unmangled_name(updated_node.name.value) if var_name in self.param_replacements: @@ -187,7 +193,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_Parameters( - self, original_node: cst.Parameters, updated_node: cst.Parameters, + self, + original_node: cst.Parameters, + updated_node: cst.Parameters, ) -> cst.Parameters: # 
A very special case for when we use a template variable for all # function parameters. @@ -235,7 +243,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_Expr( - self, original_node: cst.Expr, updated_node: cst.Expr, + self, + original_node: cst.Expr, + updated_node: cst.Expr, ) -> cst.BaseSmallStatement: # We can't use matchers here due to circular imports. We do a similar trick # to the above stanza handling SimpleStatementLine to support templates @@ -267,7 +277,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_IndentedBlock( - self, original_node: cst.IndentedBlock, updated_node: cst.IndentedBlock, + self, + original_node: cst.IndentedBlock, + updated_node: cst.IndentedBlock, ) -> cst.BaseSuite: # We can't use matchers here due to circular imports. We take advantage of # the fact that a name in an indented block will be parsed as an Expr node @@ -289,7 +301,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_Index( - self, original_node: cst.Index, updated_node: cst.Index, + self, + original_node: cst.Index, + updated_node: cst.Index, ) -> cst.BaseSlice: # We can't use matchers here due to circular imports expr = updated_node.value @@ -300,7 +314,9 @@ class TemplateTransformer(cst.CSTTransformer): return updated_node def leave_SubscriptElement( - self, original_node: cst.SubscriptElement, updated_node: cst.SubscriptElement, + self, + original_node: cst.SubscriptElement, + updated_node: cst.SubscriptElement, ) -> cst.SubscriptElement: # We can't use matchers here due to circular imports. We use the trick # similar to above stanzas where a template replacement variable will @@ -339,7 +355,8 @@ class TemplateChecker(cst.CSTVisitor): def unmangle_nodes( - tree: cst.CSTNode, template_replacements: Mapping[str, ValidReplacementType], + tree: cst.CSTNode, + template_replacements: Mapping[str, ValidReplacementType], ) -> cst.CSTNode: unmangler = TemplateTransformer(template_replacements) return ensure_type(tree.visit(unmangler), cst.CSTNode) diff --git a/libcst/helpers/tests/test_expression.py b/libcst/helpers/tests/test_expression.py index ca2d3533..f80e6082 100644 --- a/libcst/helpers/tests/test_expression.py +++ b/libcst/helpers/tests/test_expression.py @@ -35,7 +35,9 @@ class ExpressionTest(UnitTest): ) ) def test_get_full_name_for_expression( - self, input: Union[str, cst.CSTNode], output: Optional[str], + self, + input: Union[str, cst.CSTNode], + output: Optional[str], ) -> None: self.assertEqual(get_full_name_for_node(input), output) if output is None: diff --git a/libcst/helpers/tests/test_statement.py b/libcst/helpers/tests/test_statement.py index 8e38f19e..4d2009d1 100644 --- a/libcst/helpers/tests/test_statement.py +++ b/libcst/helpers/tests/test_statement.py @@ -35,7 +35,10 @@ class StatementTest(UnitTest): ) ) def test_get_absolute_module( - self, module: Optional[str], importfrom: str, output: Optional[str], + self, + module: Optional[str], + importfrom: str, + output: Optional[str], ) -> None: node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) assert len(node.body) == 1, "Unexpected number of statements!" 
diff --git a/libcst/helpers/tests/test_template.py b/libcst/helpers/tests/test_template.py index 1847728d..1805fb51 100644 --- a/libcst/helpers/tests/test_template.py +++ b/libcst/helpers/tests/test_template.py @@ -63,7 +63,8 @@ class TemplateTest(UnitTest): msg=cst.SimpleString('"Somehow True is no longer True..."'), ) self.assertEqual( - self.code(statement), 'assert True, "Somehow True is no longer True..."\n', + self.code(statement), + 'assert True, "Somehow True is no longer True..."\n', ) def test_simple_expression(self) -> None: @@ -80,16 +81,20 @@ class TemplateTest(UnitTest): ), ) self.assertEqual( - self.code(expression), "one + two + (three * four)", + self.code(expression), + "one + two + (three * four)", ) def test_annotation(self) -> None: # Test that we can insert an annotation expression normally. statement = parse_template_statement( - "x: {type} = {val}", type=cst.Name("int"), val=cst.Integer("5"), + "x: {type} = {val}", + type=cst.Name("int"), + val=cst.Integer("5"), ) self.assertEqual( - self.code(statement), "x: int = 5\n", + self.code(statement), + "x: int = 5\n", ) # Test that we can insert an annotation node as a special case. @@ -99,7 +104,8 @@ class TemplateTest(UnitTest): val=cst.Integer("5"), ) self.assertEqual( - self.code(statement), "x: int = 5\n", + self.code(statement), + "x: int = 5\n", ) def test_assign_target(self) -> None: @@ -111,7 +117,8 @@ class TemplateTest(UnitTest): val=cst.Integer("5"), ) self.assertEqual( - self.code(statement), "first = second = 5\n", + self.code(statement), + "first = second = 5\n", ) # Test that we can insert an assignment target as a special case. @@ -122,54 +129,69 @@ class TemplateTest(UnitTest): val=cst.Integer("5"), ) self.assertEqual( - self.code(statement), "first = second = 5\n", + self.code(statement), + "first = second = 5\n", ) def test_parameters(self) -> None: # Test that we can insert a parameter into a function def normally. statement = parse_template_statement( - "def foo({arg}): pass", arg=cst.Name("bar"), + "def foo({arg}): pass", + arg=cst.Name("bar"), ) self.assertEqual( - self.code(statement), "def foo(bar): pass\n", + self.code(statement), + "def foo(bar): pass\n", ) # Test that we can insert a parameter as a special case. statement = parse_template_statement( - "def foo({arg}): pass", arg=cst.Param(cst.Name("bar")), + "def foo({arg}): pass", + arg=cst.Param(cst.Name("bar")), ) self.assertEqual( - self.code(statement), "def foo(bar): pass\n", + self.code(statement), + "def foo(bar): pass\n", ) # Test that we can insert a parameters list as a special case. statement = parse_template_statement( "def foo({args}): pass", - args=cst.Parameters((cst.Param(cst.Name("bar")),),), + args=cst.Parameters( + (cst.Param(cst.Name("bar")),), + ), ) self.assertEqual( - self.code(statement), "def foo(bar): pass\n", + self.code(statement), + "def foo(bar): pass\n", ) # Test filling out multiple parameters statement = parse_template_statement( "def foo({args}): pass", args=cst.Parameters( - params=(cst.Param(cst.Name("bar")), cst.Param(cst.Name("baz")),), + params=( + cst.Param(cst.Name("bar")), + cst.Param(cst.Name("baz")), + ), star_kwarg=cst.Param(cst.Name("rest")), ), ) self.assertEqual( - self.code(statement), "def foo(bar, baz, **rest): pass\n", + self.code(statement), + "def foo(bar, baz, **rest): pass\n", ) def test_args(self) -> None: # Test that we can insert an argument into a function call normally. 
statement = parse_template_expression( - "foo({arg1}, {arg2})", arg1=cst.Name("bar"), arg2=cst.Name("baz"), + "foo({arg1}, {arg2})", + arg1=cst.Name("bar"), + arg2=cst.Name("baz"), ) self.assertEqual( - self.code(statement), "foo(bar, baz)", + self.code(statement), + "foo(bar, baz)", ) # Test that we can insert an argument as a special case. @@ -179,7 +201,8 @@ class TemplateTest(UnitTest): arg2=cst.Arg(cst.Name("baz")), ) self.assertEqual( - self.code(statement), "foo(bar, baz)", + self.code(statement), + "foo(bar, baz)", ) def test_statement(self) -> None: @@ -188,13 +211,19 @@ class TemplateTest(UnitTest): module = parse_template_module( "{statement1}\n{statement2}\n{statement3}\n", statement1=cst.If( - test=cst.Name("foo"), body=cst.SimpleStatementSuite((cst.Pass(),),), + test=cst.Name("foo"), + body=cst.SimpleStatementSuite( + (cst.Pass(),), + ), + ), + statement2=cst.SimpleStatementLine( + (cst.Expr(cst.Call(cst.Name("bar"))),), ), - statement2=cst.SimpleStatementLine((cst.Expr(cst.Call(cst.Name("bar"))),),), statement3=cst.Pass(), ) self.assertEqual( - module.code, "if foo: pass\nbar()\npass\n", + module.code, + "if foo: pass\nbar()\npass\n", ) def test_suite(self) -> None: @@ -202,91 +231,126 @@ class TemplateTest(UnitTest): # spot accepting a suite. module = parse_template_module( "if x is True: {suite}\n", - suite=cst.SimpleStatementSuite(body=(cst.Pass(),),), + suite=cst.SimpleStatementSuite( + body=(cst.Pass(),), + ), ) self.assertEqual( - module.code, "if x is True: pass\n", + module.code, + "if x is True: pass\n", ) module = parse_template_module( "if x is True: {suite}\n", - suite=cst.IndentedBlock(body=(cst.SimpleStatementLine((cst.Pass(),),),),), + suite=cst.IndentedBlock( + body=( + cst.SimpleStatementLine( + (cst.Pass(),), + ), + ), + ), ) self.assertEqual( - module.code, "if x is True:\n pass\n", + module.code, + "if x is True:\n pass\n", ) module = parse_template_module( "if x is True:\n {suite}\n", - suite=cst.SimpleStatementSuite(body=(cst.Pass(),),), + suite=cst.SimpleStatementSuite( + body=(cst.Pass(),), + ), ) self.assertEqual( - module.code, "if x is True: pass\n", + module.code, + "if x is True: pass\n", ) module = parse_template_module( "if x is True:\n {suite}\n", - suite=cst.IndentedBlock(body=(cst.SimpleStatementLine((cst.Pass(),),),),), + suite=cst.IndentedBlock( + body=( + cst.SimpleStatementLine( + (cst.Pass(),), + ), + ), + ), ) self.assertEqual( - module.code, "if x is True:\n pass\n", + module.code, + "if x is True:\n pass\n", ) def test_subscript(self) -> None: # Test that we can insert various subscript slices into an # acceptible spot. 
expression = parse_template_expression( - "Optional[{type}]", type=cst.Name("int"), + "Optional[{type}]", + type=cst.Name("int"), ) self.assertEqual( - self.code(expression), "Optional[int]", + self.code(expression), + "Optional[int]", ) expression = parse_template_expression( - "Tuple[{type1}, {type2}]", type1=cst.Name("int"), type2=cst.Name("str"), + "Tuple[{type1}, {type2}]", + type1=cst.Name("int"), + type2=cst.Name("str"), ) self.assertEqual( - self.code(expression), "Tuple[int, str]", + self.code(expression), + "Tuple[int, str]", ) expression = parse_template_expression( - "Optional[{type}]", type=cst.Index(cst.Name("int")), + "Optional[{type}]", + type=cst.Index(cst.Name("int")), ) self.assertEqual( - self.code(expression), "Optional[int]", + self.code(expression), + "Optional[int]", ) expression = parse_template_expression( - "Optional[{type}]", type=cst.SubscriptElement(cst.Index(cst.Name("int"))), + "Optional[{type}]", + type=cst.SubscriptElement(cst.Index(cst.Name("int"))), ) self.assertEqual( - self.code(expression), "Optional[int]", + self.code(expression), + "Optional[int]", ) expression = parse_template_expression( - "foo[{slice}]", slice=cst.Slice(cst.Integer("5"), cst.Integer("6")), + "foo[{slice}]", + slice=cst.Slice(cst.Integer("5"), cst.Integer("6")), ) self.assertEqual( - self.code(expression), "foo[5:6]", + self.code(expression), + "foo[5:6]", ) expression = parse_template_expression( "foo[{slice}]", slice=cst.SubscriptElement(cst.Slice(cst.Integer("5"), cst.Integer("6"))), ) self.assertEqual( - self.code(expression), "foo[5:6]", + self.code(expression), + "foo[5:6]", ) expression = parse_template_expression( - "foo[{slice}]", slice=cst.Slice(cst.Integer("5"), cst.Integer("6")), + "foo[{slice}]", + slice=cst.Slice(cst.Integer("5"), cst.Integer("6")), ) self.assertEqual( - self.code(expression), "foo[5:6]", + self.code(expression), + "foo[5:6]", ) expression = parse_template_expression( "foo[{slice}]", slice=cst.SubscriptElement(cst.Slice(cst.Integer("5"), cst.Integer("6"))), ) self.assertEqual( - self.code(expression), "foo[5:6]", + self.code(expression), + "foo[5:6]", ) expression = parse_template_expression( @@ -295,7 +359,8 @@ class TemplateTest(UnitTest): slice2=cst.Index(cst.Integer("7")), ) self.assertEqual( - self.code(expression), "foo[5:6, 7]", + self.code(expression), + "foo[5:6, 7]", ) expression = parse_template_expression( "foo[{slice1}, {slice2}]", @@ -303,20 +368,25 @@ class TemplateTest(UnitTest): slice2=cst.SubscriptElement(cst.Index(cst.Integer("7"))), ) self.assertEqual( - self.code(expression), "foo[5:6, 7]", + self.code(expression), + "foo[5:6, 7]", ) def test_decorators(self) -> None: # Test that we can special-case decorators when needed. 
statement = parse_template_statement( - "@{decorator}\ndef foo(): pass\n", decorator=cst.Name("bar"), + "@{decorator}\ndef foo(): pass\n", + decorator=cst.Name("bar"), ) self.assertEqual( - self.code(statement), "@bar\ndef foo(): pass\n", + self.code(statement), + "@bar\ndef foo(): pass\n", ) statement = parse_template_statement( - "@{decorator}\ndef foo(): pass\n", decorator=cst.Decorator(cst.Name("bar")), + "@{decorator}\ndef foo(): pass\n", + decorator=cst.Decorator(cst.Name("bar")), ) self.assertEqual( - self.code(statement), "@bar\ndef foo(): pass\n", + self.code(statement), + "@bar\ndef foo(): pass\n", ) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 0cf281c0..6f1a1c88 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -706,10 +706,10 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): return self._matcher def __or__(self, other: object) -> NoReturn: - raise Exception(f"AtLeastN cannot be used in a OneOf matcher") + raise Exception("AtLeastN cannot be used in a OneOf matcher") def __and__(self, other: object) -> NoReturn: - raise Exception(f"AtLeastN cannot be used in an AllOf matcher") + raise Exception("AtLeastN cannot be used in an AllOf matcher") def __invert__(self) -> NoReturn: raise Exception("Cannot invert an AtLeastN matcher!") @@ -808,10 +808,10 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): return self._matcher def __or__(self, other: object) -> NoReturn: - raise Exception(f"AtMostN cannot be used in a OneOf matcher") + raise Exception("AtMostN cannot be used in a OneOf matcher") def __and__(self, other: object) -> NoReturn: - raise Exception(f"AtMostN cannot be used in an AllOf matcher") + raise Exception("AtMostN cannot be used in an AllOf matcher") def __invert__(self) -> NoReturn: raise Exception("Cannot invert an AtMostN matcher!") diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 1232191a..301e675a 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -193,8 +193,8 @@ def _verify_parameter_annotations( raise MatchDecoratorMismatch( meth.__qualname__, f"@{decorator_name} can be called with {match.__name__} " - + f"but the decorated function parameter annotations do " - + f"not include this type.", + + "but the decorated function parameter annotations do " + + "not include this type.", ) diff --git a/libcst/matchers/tests/test_matchers_with_metadata.py b/libcst/matchers/tests/test_matchers_with_metadata.py index 4e859973..fcc86f66 100644 --- a/libcst/matchers/tests/test_matchers_with_metadata.py +++ b/libcst/matchers/tests/test_matchers_with_metadata.py @@ -46,7 +46,8 @@ class MatchersMetadataTest(UnitTest): m.Name( value="foo", metadata=m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 3)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 3)), ), ), metadata_resolver=wrapper, @@ -59,10 +60,12 @@ class MatchersMetadataTest(UnitTest): node, m.BinaryOperation( left=m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 1)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 1)), ), right=m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 4), (1, 5)), + meta.PositionProvider, + self._make_coderange((1, 4), (1, 5)), ), ), metadata_resolver=wrapper, @@ -78,7 +81,8 @@ class MatchersMetadataTest(UnitTest): m.Name( value="foo", metadata=m.MatchMetadata( - meta.PositionProvider, self._make_coderange((2, 0), (2, 3)), + meta.PositionProvider, + 
self._make_coderange((2, 0), (2, 3)), ), ), metadata_resolver=wrapper, @@ -91,10 +95,12 @@ class MatchersMetadataTest(UnitTest): node, m.BinaryOperation( left=m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 1)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 1)), ), right=m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 4), (1, 5)), + meta.PositionProvider, + self._make_coderange((1, 4), (1, 5)), ), ), metadata_resolver=wrapper, @@ -201,10 +207,12 @@ class MatchersMetadataTest(UnitTest): left=m.Name( metadata=m.OneOf( m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 1)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 1)), ), m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 2)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 2)), ), ) ) @@ -215,10 +223,12 @@ class MatchersMetadataTest(UnitTest): left=m.Integer( metadata=m.OneOf( m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 1)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 1)), ), m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 2)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 2)), ), ) ) @@ -233,7 +243,8 @@ class MatchersMetadataTest(UnitTest): left=m.Name( metadata=m.AllOf( m.MatchMetadata( - meta.PositionProvider, self._make_coderange((1, 0), (1, 1)), + meta.PositionProvider, + self._make_coderange((1, 0), (1, 1)), ), m.MatchMetadata( meta.ExpressionContextProvider, meta.ExpressionContext.LOAD diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index 21f3a68b..b6ee1a35 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -13,7 +13,7 @@ from libcst.metadata.base_provider import BatchableMetadataProvider class ExpressionContext(Enum): """Used in :class:`ExpressionContextProvider` to represent context of a variable - reference. """ + reference.""" #: Load the value of a variable reference. #: diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 2df82047..e1c7f196 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -398,7 +398,7 @@ class Scope(abc.ABC): def get_qualified_names_for( self, node: Union[str, cst.CSTNode] ) -> Collection[QualifiedName]: - """ Get all :class:`~libcst.metadata.QualifiedName` in current scope given a + """Get all :class:`~libcst.metadata.QualifiedName` in current scope given a :class:`~libcst.CSTNode`. The source of a qualified name can be either :attr:`QualifiedNameSource.IMPORT`, :attr:`QualifiedNameSource.BUILTIN` or :attr:`QualifiedNameSource.LOCAL`. 
diff --git a/libcst/metadata/tests/test_reentrant_codegen.py b/libcst/metadata/tests/test_reentrant_codegen.py index da6b3990..8e6abeab 100644 --- a/libcst/metadata/tests/test_reentrant_codegen.py +++ b/libcst/metadata/tests/test_reentrant_codegen.py @@ -93,7 +93,9 @@ class ExperimentalReentrantCodegenProviderTest(UnitTest): self.assertEqual(codegen_partial.get_original_module_code(), old_module) self.assertEqual(codegen_partial.get_modified_module_code(new_node), new_module) - def test_byte_conversion(self,) -> None: + def test_byte_conversion( + self, + ) -> None: module_bytes = "fn()\n".encode("utf-16") mw = MetadataWrapper( cst.parse_module("fn()\n", cst.PartialParserConfig(encoding="utf-16")) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index a35b444e..d6337f3a 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -152,7 +152,12 @@ class ScopeProviderTest(UnitTest): self.assertEqual(scopes[alias], scope_of_module) for idx, in_scopes in enumerate( - [["foo", "foo.bar"], ["fizzbuzz"], ["a", "a.b", "a.b.c"], ["g"],] + [ + ["foo", "foo.bar"], + ["fizzbuzz"], + ["a", "a.b", "a.b.c"], + ["g"], + ] ): for in_scope in in_scopes: self.assertEqual( diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index 208aacfc..6c31b17f 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -29,11 +29,11 @@ from libcst.metadata.base_provider import BatchableMetadataProvider if TYPE_CHECKING: from libcst._nodes.base import CSTNode # noqa: F401 from libcst._nodes.module import Module # noqa: F401 + from libcst._visitors import CSTVisitorT # noqa: F401 from libcst.metadata.base_provider import ( # noqa: F401 BaseMetadataProvider, ProviderT, ) - from libcst._visitors import CSTVisitorT # noqa: F401 _T = TypeVar("_T") diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 4c18643a..11fd7f8d 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -65,7 +65,9 @@ class TypeVerificationVisitor(cst.CSTVisitor): # remove this if condition when the type issues are fixed. if not any( node.deep_equals(name) and tup == _tup - for (name, _tup) in {(cst.Name("i"), (17, 21, 17, 22)),} + for (name, _tup) in { + (cst.Name("i"), (17, 21, 17, 22)), + } ): self.test.assertIn( tup, @@ -116,8 +118,7 @@ class PyreIntegrationTest(UnitTest): if __name__ == "__main__": - """Run this script directly to generate pyre data for test suite (tests/pyre/*.py) - """ + """Run this script directly to generate pyre data for test suite (tests/pyre/*.py)""" print("start pyre server") stdout: str stderr: str diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index b5525554..0779ec37 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -5,6 +5,7 @@ from types import MappingProxyType from typing import ( + TYPE_CHECKING, Any, AsyncGenerator, ClassVar, @@ -28,6 +29,10 @@ from libcst._type_enforce import is_value_of_type from libcst.testing.utils import UnitTest, data_provider +if TYPE_CHECKING: + from collections import Counter # noqa: F401 + + class MyExampleClass: pass @@ -97,7 +102,7 @@ class TypeEnforcementTest(UnitTest): (NamedTupleSubclass("foo", 123), NamedTupleSubclass), # forward references should just pass for anything # pyre-ignore Pyre doesn't think a forwardref is a typevar. 
- (MyExampleClass(), Optional["NodeUser"]), + (MyExampleClass(), Optional["Counter"]), # class variables get unwrapped, and behave like their underlying type (MyExampleClass(), ClassVar[MyExampleClass]), # dicts work diff --git a/pyproject.toml b/pyproject.toml index 37780f85..c55f8d4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,4 +8,3 @@ include_trailing_comma = true force_grid_wrap = 0 lines_after_imports = 2 combine_as_imports = true -not_skip = "__init__.py" diff --git a/requirements-dev.txt b/requirements-dev.txt index 44925b02..c0ccdbe1 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,14 +1,14 @@ -black==19.10b0 -codecov==2.1.4 -coverage==4.5.4 -flake8==3.7.8 -hypothesis==4.36.0 -hypothesmith==0.0.4 +black>=19.10b0 +codecov>=2.1.4 +coverage>=4.5.4 +flake8>=3.7.8 +hypothesis>=4.36.0 +hypothesmith>=0.0.4 git+https://github.com/jimmylai/sphinx.git@slots_type_annotation -isort==4.3.20 -jupyter==1.0.0 -nbsphinx==0.4.2 +isort>=4.3.20 +jupyter>=1.0.0 +nbsphinx>=0.4.2 pyre-check==0.0.41 -sphinx-rtd-theme==0.4.3 -prompt-toolkit==2.0.9 -tox==3.18.1 +sphinx-rtd-theme>=0.4.3 +prompt-toolkit>=2.0.9 +tox>=3.18.1 diff --git a/requirements.txt b/requirements.txt index 752481bb..cf8ff05c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -dataclasses==0.6.0; python_version < '3.7' -typing_extensions==3.7.4.2 -typing_inspect==0.4.0 -pyyaml==5.2 +dataclasses>=0.6.0; python_version < '3.7' +typing_extensions>=3.7.4.2 +typing_inspect>=0.4.0 +pyyaml>=5.2 diff --git a/setup.py b/setup.py index f92086b9..4dd024dd 100644 --- a/setup.py +++ b/setup.py @@ -46,27 +46,9 @@ setuptools.setup( }, test_suite="libcst", python_requires=">=3.6", - install_requires=[ - "dataclasses; python_version < '3.7'", - "typing_extensions >= 3.7.4.2", - "typing_inspect >= 0.4.0", - "pyyaml >= 5.2", - ], + install_requires=[dep.strip() for dep in open("requirements.txt").readlines()], extras_require={ - "dev": [ - "black", - "codecov", - "coverage", - "hypothesis >= 4.36.0", - "hypothesmith >= 0.0.4", - "isort", - "flake8", - "jupyter", - "nbsphinx", - "pyre-check", - "Sphinx", - "sphinx-rtd-theme", - ] + "dev": [dep.strip() for dep in open("requirements-dev.txt").readlines() if "=" in dep], }, classifiers=[ "License :: OSI Approved :: MIT License", diff --git a/tox.ini b/tox.ini index e3f31025..5e8b21bd 100644 --- a/tox.ini +++ b/tox.ini @@ -14,7 +14,7 @@ deps = -rrequirements-dev.txt commands = flake8 {posargs} - isort --check-only -rc {posargs:.} + isort --check-only {posargs:.} black --check {posargs:libcst/} [testenv:docs] @@ -30,7 +30,7 @@ deps = -rrequirements-dev.txt commands = flake8 {posargs} - isort -y -q -rc {posargs:.} + isort -q {posargs:.} black {posargs:libcst/} [testenv:coverage] From 3e63f914e530dc4d5bec46db10f66ca01cba42ba Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Sun, 6 Sep 2020 01:29:12 -0700 Subject: [PATCH 066/632] [README] update Codecov badge (#382) --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 22688034..6d477eae 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python :target: https://circleci.com/gh/Instagram/LibCST/tree/master :alt: CircleCI -.. |codecov-badge| image:: http://codecov.io/gh/Instagram/LibCST/coverage.svg?branch=master +.. 
|codecov-badge| image:: https://codecov.io/gh/Instagram/LibCST/branch/master/graph/badge.svg :target: https://codecov.io/gh/Instagram/LibCST/branch/master :alt: CodeCov From 6a02e2e9952b0a4b74208c0c379d416965ac3fbb Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Wed, 9 Sep 2020 17:33:49 -0700 Subject: [PATCH 067/632] [CI] add Fixit to tox -e lint (#386) --- .fixit.config.yaml | 10 ++++++++++ libcst/_parser/parso/pgen2/generator.py | 6 ++++-- libcst/_parser/parso/python/tokenize.py | 4 ++-- libcst/_parser/parso/tests/test_utils.py | 2 +- libcst/_parser/parso/utils.py | 6 ++++-- libcst/_parser/tests/test_detect_config.py | 2 +- libcst/codemod/_cli.py | 2 +- libcst/codemod/commands/ensure_import_present.py | 2 +- libcst/codemod/commands/remove_unused_imports.py | 2 +- .../visitors/tests/test_gather_unused_imports.py | 4 ++-- libcst/matchers/_matcher_base.py | 12 ++++++++---- requirements-dev.txt | 1 + tox.ini | 2 ++ 13 files changed, 38 insertions(+), 17 deletions(-) create mode 100644 .fixit.config.yaml diff --git a/.fixit.config.yaml b/.fixit.config.yaml new file mode 100644 index 00000000..077e120c --- /dev/null +++ b/.fixit.config.yaml @@ -0,0 +1,10 @@ +block_list_patterns: +- '@generated' +- '@nolint' +block_list_rules: ["UseFstringRule", "CompareSingletonPrimitivesByIsRule"] +fixture_dir: ./fixtures +formatter: ["black", "-"] +packages: +- fixit.rules +repo_root: libcst +rule_config: {} diff --git a/libcst/_parser/parso/pgen2/generator.py b/libcst/_parser/parso/pgen2/generator.py index c08b164f..546cc85f 100644 --- a/libcst/_parser/parso/pgen2/generator.py +++ b/libcst/_parser/parso/pgen2/generator.py @@ -321,8 +321,10 @@ def _calculate_tree_traversal(nonterminal_to_dfas): ] ) raise ValueError( - "Rule %s is ambiguous; given a %s token, we " - "can't determine if we should evaluate %s or %s." + ( + "Rule %s is ambiguous; given a %s token, we " + + "can't determine if we should evaluate %s or %s." 
+ ) % ((dfa_state.from_rule, transition) + tuple(choices)) ) transitions[transition] = DFAPlan(next_dfa, pushes) diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 4b1905cb..0f60472e 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -108,7 +108,7 @@ def _all_string_prefixes( if version_info < (3, 0) or version_info >= (3, 3): valid_string_prefixes.append("u") - result = set([""]) + result = {""} if version_info >= (3, 6) and include_fstring: f = ["f", "fr"] if only_fstring: @@ -326,7 +326,7 @@ class PythonToken(Token): ) -class FStringNode(object): +class FStringNode: def __init__(self, quote): self.quote = quote self.parentheses_count = 0 diff --git a/libcst/_parser/parso/tests/test_utils.py b/libcst/_parser/parso/tests/test_utils.py index 03ba9a68..17bdb8a0 100644 --- a/libcst/_parser/parso/tests/test_utils.py +++ b/libcst/_parser/parso/tests/test_utils.py @@ -49,7 +49,7 @@ class ParsoUtilsTest(UnitTest): def test_python_bytes_to_unicode_unicode_text(self): source = ( b"# vim: fileencoding=utf-8\n" - b"# \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\n" + + b"# \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\n" ) actual = python_bytes_to_unicode(source) expected = source.decode("utf-8") diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py index a6388040..27b93731 100644 --- a/libcst/_parser/parso/utils.py +++ b/libcst/_parser/parso/utils.py @@ -184,8 +184,10 @@ def _parse_version(version: str) -> PythonVersionInfo: match = re.match(r"(\d+)(?:\.(\d+)(?:\.\d+)?)?$", version) if match is None: raise ValueError( - "The given version is not in the right format. " - 'Use something like "3.2" or "3".' + ( + "The given version is not in the right format. " + + 'Use something like "3.2" or "3".' + ) ) major = int(match.group(1)) diff --git a/libcst/_parser/tests/test_detect_config.py b/libcst/_parser/tests/test_detect_config.py index a46106dc..b17c9fe5 100644 --- a/libcst/_parser/tests/test_detect_config.py +++ b/libcst/_parser/tests/test_detect_config.py @@ -284,7 +284,7 @@ class TestDetectConfig(UnitTest): "future_imports_in_mixed_position": { "source": ( b"from __future__ import a, b\nimport os\n" - b"from __future__ import c\n" + + b"from __future__ import c\n" ), "partial": PartialParserConfig(python_version="3.7"), "detect_trailing_newline": True, diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 73e443ec..d4ced900 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -585,7 +585,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 # Ensure that we have no duplicates, otherwise we might get race conditions # on write. - files = sorted(list(set(os.path.abspath(f) for f in files))) + files = sorted(list({os.path.abspath(f) for f in files})) total = len(files) progress = Progress(enabled=not hide_progress, total=total) diff --git a/libcst/codemod/commands/ensure_import_present.py b/libcst/codemod/commands/ensure_import_present.py index 04b1d129..c2ec033f 100644 --- a/libcst/codemod/commands/ensure_import_present.py +++ b/libcst/codemod/commands/ensure_import_present.py @@ -44,7 +44,7 @@ class EnsureImportPresentCommand(MagicArgsCodemodCommand): metavar="ALIAS", help=( "Alias that will be used for the imported module or entity. If left " - "empty, no alias will be applied." + + "empty, no alias will be applied." 
), type=str, default=None, diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index 1f23a264..741f9a46 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -30,7 +30,7 @@ class RemoveUnusedImportsCommand(VisitorBasedCodemodCommand): DESCRIPTION: str = ( "Remove all imports that are not used in a file. " - "Note: only considers the file in isolation. " + + "Note: only considers the file in isolation. " ) METADATA_DEPENDENCIES: Tuple[ProviderT] = (PositionProvider,) diff --git a/libcst/codemod/visitors/tests/test_gather_unused_imports.py b/libcst/codemod/visitors/tests/test_gather_unused_imports.py index bd63f26d..5fb3cba2 100644 --- a/libcst/codemod/visitors/tests/test_gather_unused_imports.py +++ b/libcst/codemod/visitors/tests/test_gather_unused_imports.py @@ -17,10 +17,10 @@ class TestGatherUnusedImportsVisitor(UnitTest): mod.resolve_many(GatherUnusedImportsVisitor.METADATA_DEPENDENCIES) instance = GatherUnusedImportsVisitor(CodemodContext(wrapper=mod)) mod.visit(instance) - return set( + return { alias.evaluated_alias or alias.evaluated_name for alias, _ in instance.unused_imports - ) + } def test_no_imports(self) -> None: imports = self.gather_imports( diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 6f1a1c88..532cb53d 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -352,8 +352,10 @@ class _ExtractMatchingNode(Generic[_MatcherT]): # same node, or none of them. It makes more sense to move the SaveMatchedNode # up to wrap the AllOf. raise Exception( - "Cannot use AllOf with SavedMatchedNode children! Instead, you should " - "use SaveMatchedNode(AllOf(options...))." + ( + "Cannot use AllOf with SavedMatchedNode children! Instead, you should " + + "use SaveMatchedNode(AllOf(options...))." + ) ) def __getattr__(self, key: str) -> object: @@ -366,8 +368,10 @@ class _ExtractMatchingNode(Generic[_MatcherT]): # This doesn't make sense. We don't want to capture a node only if it # doesn't match, since this will never capture anything. raise Exception( - "Cannot invert a SaveMatchedNode. Instead you should wrap SaveMatchedNode " - "around your inversion itself" + ( + "Cannot invert a SaveMatchedNode. Instead you should wrap SaveMatchedNode " + + "around your inversion itself" + ) ) def __repr__(self) -> str: diff --git a/requirements-dev.txt b/requirements-dev.txt index c0ccdbe1..84fd1d13 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,7 @@ black>=19.10b0 codecov>=2.1.4 coverage>=4.5.4 +fixit>=0.1.0 flake8>=3.7.8 hypothesis>=4.36.0 hypothesmith>=0.0.4 diff --git a/tox.ini b/tox.ini index 5e8b21bd..a9947c2a 100644 --- a/tox.ini +++ b/tox.ini @@ -16,6 +16,7 @@ commands = flake8 {posargs} isort --check-only {posargs:.} black --check {posargs:libcst/} + python3 -m fixit.cli.run_rules [testenv:docs] deps = @@ -32,6 +33,7 @@ commands = flake8 {posargs} isort -q {posargs:.} black {posargs:libcst/} + python3 -m fixit.cli.apply_fix [testenv:coverage] deps = From 6ae25834bbcebb79d08c4516c71569033593b4d7 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Thu, 10 Sep 2020 21:09:59 +0300 Subject: [PATCH 068/632] Implement TypeOf matcher (#384) * Implement TypeOf matcher * Satisfy the type checker * Expand the test case * Fix the annotation of _raw_options * Add documentation... 
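For reference, a rough usage sketch of the new matcher (it mirrors the docstring and test cases in the diff below, and assumes a LibCST build that already includes this patch):

```python
import libcst as cst
import libcst.matchers as m

node = cst.BinaryOperation(
    left=cst.Name("foo"), operator=cst.Add(), right=cst.Name("bar")
)

# Match either a BinaryOperation or a BooleanOperation whose left side is `foo`.
assert m.matches(
    node, m.TypeOf(m.BinaryOperation, m.BooleanOperation)(left=m.Name("foo"))
)

# The same matcher, spelled with the new bitwise-or shorthand on matcher types.
assert m.matches(
    node, (m.BinaryOperation | m.BooleanOperation)(left=m.Name("foo"))
)
```

Chaining uninitialized matcher types with `|` (for example `m.Name | m.SimpleString`) behaves like `m.OneOf(m.Name(), m.SimpleString())`.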
--- docs/source/matchers.rst | 1 + libcst/codegen/gen_matcher_classes.py | 11 ++- libcst/matchers/__init__.py | 58 +++++++++------- libcst/matchers/_matcher_base.py | 94 +++++++++++++++++++++++++- libcst/matchers/tests/test_matchers.py | 84 +++++++++++++++++++++++ 5 files changed, 218 insertions(+), 30 deletions(-) diff --git a/docs/source/matchers.rst b/docs/source/matchers.rst index 37398f40..eac6faa9 100644 --- a/docs/source/matchers.rst +++ b/docs/source/matchers.rst @@ -145,6 +145,7 @@ when calling :func:`~libcst.matchers.matches` or using decorators. .. autoclass:: libcst.matchers.OneOf .. autoclass:: libcst.matchers.AllOf +.. autoclass:: libcst.matchers.TypeOf .. autofunction:: libcst.matchers.DoesNotMatch .. autoclass:: libcst.matchers.MatchIfTrue .. autofunction:: libcst.matchers.MatchRegex diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 5c6a550d..b0657890 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -456,14 +456,13 @@ generated_code.append("# LICENSE file in the root directory of this source tree. generated_code.append("") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") -generated_code.append("from abc import ABC") generated_code.append("from dataclasses import dataclass") generated_code.append("from typing import Callable, Sequence, Union") generated_code.append("from typing_extensions import Literal") generated_code.append("import libcst as cst") generated_code.append("") generated_code.append( - "from libcst.matchers._matcher_base import BaseMatcherNode, DoNotCareSentinel, DoNotCare, OneOf, AllOf, DoesNotMatch, MatchIfTrue, MatchRegex, MatchMetadata, MatchMetadataIfTrue, ZeroOrMore, AtLeastN, ZeroOrOne, AtMostN, SaveMatchedNode, extract, extractall, findall, matches, replace" + "from libcst.matchers._matcher_base import AbstractBaseMatcherNodeMeta, BaseMatcherNode, DoNotCareSentinel, DoNotCare, TypeOf, OneOf, AllOf, DoesNotMatch, MatchIfTrue, MatchRegex, MatchMetadata, MatchMetadataIfTrue, ZeroOrMore, AtLeastN, ZeroOrOne, AtMostN, SaveMatchedNode, extract, extractall, findall, matches, replace" ) all_exports.update( [ @@ -477,6 +476,7 @@ all_exports.update( "MatchRegex", "MatchMetadata", "MatchMetadataIfTrue", + "TypeOf", "ZeroOrMore", "AtLeastN", "ZeroOrOne", @@ -504,10 +504,15 @@ all_exports.update( ] ) +generated_code.append("") +generated_code.append("") +generated_code.append("class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta):") +generated_code.append(" __slots__ = ()") + for base in typeclasses: generated_code.append("") generated_code.append("") - generated_code.append(f"class {base.__name__}(ABC):") + generated_code.append(f"class {base.__name__}(_NodeABC):") generated_code.append(" pass") all_exports.add(base.__name__) diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 8bd9f6b6..3b2d9077 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -5,7 +5,6 @@ # This file was generated by libcst.codegen.gen_matcher_classes -from abc import ABC from dataclasses import dataclass from typing import Callable, Sequence, Union @@ -14,6 +13,7 @@ from typing_extensions import Literal import libcst as cst from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit from libcst.matchers._matcher_base import ( + AbstractBaseMatcherNodeMeta, AllOf, AtLeastN, AtMostN, @@ -27,6 +27,7 @@ from libcst.matchers._matcher_base import ( MatchRegex, 
OneOf, SaveMatchedNode, + TypeOf, ZeroOrMore, ZeroOrOne, extract, @@ -42,103 +43,107 @@ from libcst.matchers._visitors import ( ) -class BaseAssignTargetExpression(ABC): +class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta): + __slots__ = () + + +class BaseAssignTargetExpression(_NodeABC): pass -class BaseAugOp(ABC): +class BaseAugOp(_NodeABC): pass -class BaseBinaryOp(ABC): +class BaseBinaryOp(_NodeABC): pass -class BaseBooleanOp(ABC): +class BaseBooleanOp(_NodeABC): pass -class BaseComp(ABC): +class BaseComp(_NodeABC): pass -class BaseCompOp(ABC): +class BaseCompOp(_NodeABC): pass -class BaseCompoundStatement(ABC): +class BaseCompoundStatement(_NodeABC): pass -class BaseDelTargetExpression(ABC): +class BaseDelTargetExpression(_NodeABC): pass -class BaseDict(ABC): +class BaseDict(_NodeABC): pass -class BaseDictElement(ABC): +class BaseDictElement(_NodeABC): pass -class BaseElement(ABC): +class BaseElement(_NodeABC): pass -class BaseExpression(ABC): +class BaseExpression(_NodeABC): pass -class BaseFormattedStringContent(ABC): +class BaseFormattedStringContent(_NodeABC): pass -class BaseList(ABC): +class BaseList(_NodeABC): pass -class BaseMetadataProvider(ABC): +class BaseMetadataProvider(_NodeABC): pass -class BaseNumber(ABC): +class BaseNumber(_NodeABC): pass -class BaseParenthesizableWhitespace(ABC): +class BaseParenthesizableWhitespace(_NodeABC): pass -class BaseSet(ABC): +class BaseSet(_NodeABC): pass -class BaseSimpleComp(ABC): +class BaseSimpleComp(_NodeABC): pass -class BaseSlice(ABC): +class BaseSlice(_NodeABC): pass -class BaseSmallStatement(ABC): +class BaseSmallStatement(_NodeABC): pass -class BaseStatement(ABC): +class BaseStatement(_NodeABC): pass -class BaseString(ABC): +class BaseString(_NodeABC): pass -class BaseSuite(ABC): +class BaseSuite(_NodeABC): pass -class BaseUnaryOp(ABC): +class BaseUnaryOp(_NodeABC): pass @@ -13242,6 +13247,7 @@ __all__ = [ "TrailingWhitespace", "Try", "Tuple", + "TypeOf", "UnaryOperation", "While", "With", diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 532cb53d..70a9340a 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -7,12 +7,14 @@ import collections.abc import copy import inspect import re +from abc import ABCMeta from dataclasses import dataclass, fields from enum import Enum, auto from typing import ( Callable, Dict, Generic, + Iterator, List, Mapping, NoReturn, @@ -51,11 +53,26 @@ _CallableT = TypeVar("_CallableT", bound="Callable", covariant=True) _BaseMatcherNodeSelfT = TypeVar("_BaseMatcherNodeSelfT", bound="BaseMatcherNode") _OtherNodeT = TypeVar("_OtherNodeT") _MetadataValueT = TypeVar("_MetadataValueT") +_MatcherTypeT = TypeVar("_MatcherTypeT", bound=Type["BaseMatcherNode"]) +_OtherNodeMatcherTypeT = TypeVar( + "_OtherNodeMatcherTypeT", bound=Type["BaseMatcherNode"] +) _METADATA_MISSING_SENTINEL = object() +class AbstractBaseMatcherNodeMeta(ABCMeta): + """ + Metaclass that all matcher nodes uses. Allows chaining 2 node type + together with an bitwise-or operator to produce an :class:`TypeOf` + matcher. + """ + + def __or__(self, node: Type["BaseMatcherNode"]) -> "TypeOf[Type[BaseMatcherNode]]": + return TypeOf(self, node) + + class BaseMatcherNode: """ Base class that all concrete matchers subclass from. :class:`OneOf` and @@ -103,6 +120,81 @@ def DoNotCare() -> DoNotCareSentinel: return DoNotCareSentinel.DEFAULT +class TypeOf(Generic[_MatcherTypeT], BaseMatcherNode): + """ + Matcher that matches any one of the given types. 
Useful when you want to work + with trees where a common property might belong to more than a single type. + + For example, if you want either a binary operation or a boolean operation + where the left side has a name ``foo``:: + + m.TypeOf(m.BinaryOperation, m.BooleanOperation)(left = m.Name("foo")) + + Or you could use the shorthand, like:: + + (m.BinaryOperation | m.BooleanOperation)(left = m.Name("foo")) + + Also :class:`TypeOf` matchers can be used with initalizing in the default + state of other node matchers (without passing any extra patterns):: + + m.Name | m.SimpleString + + The will be equal to:: + + m.OneOf(m.Name(), m.SimpleString()) + """ + + def __init__(self, *options: Union[_MatcherTypeT, "TypeOf[_MatcherTypeT]"]) -> None: + actual_options: List[_MatcherTypeT] = [] + for option in options: + if isinstance(option, TypeOf): + if option.initalized: + raise Exception( + "Cannot chain an uninitalized TypeOf with an initalized one" + ) + actual_options.extend(option._raw_options) + else: + actual_options.append(option) + + self._initalized = False + self._call_items: Tuple[Tuple[object, ...], Dict[str, object]] = ((), {}) + self._raw_options: Tuple[_MatcherTypeT, ...] = tuple(actual_options) + + @property + def initalized(self) -> bool: + return self._initalized + + @property + def options(self) -> Iterator[BaseMatcherNode]: + for option in self._raw_options: + args, kwargs = self._call_items + matcher_pattern = option(*args, **kwargs) + yield matcher_pattern + + def __call__(self, *args: object, **kwargs: object) -> BaseMatcherNode: + self._initalized = True + self._call_items = (args, kwargs) + return self + + def __or__( + self, other: _OtherNodeMatcherTypeT + ) -> "TypeOf[Union[_MatcherTypeT, _OtherNodeMatcherTypeT]]": + return TypeOf[Union[_MatcherTypeT, _OtherNodeMatcherTypeT]](self, other) + + def __and__(self, other: _OtherNodeMatcherTypeT) -> NoReturn: + left, right = type(self).__name__, other.__name__ + raise TypeError( + f"TypeError: unsupported operand type(s) for &: {left!r} and {right!r}" + ) + + def __invert__(self) -> "AllOf[BaseMatcherNode]": + return AllOf(*map(DoesNotMatch, self.options)) + + def __repr__(self) -> str: + types = ", ".join(repr(option) for option in self._raw_options) + return f"TypeOf({types}, initalized = {self.initalized})" + + class OneOf(Generic[_MatcherT], BaseMatcherNode): """ Matcher that matches any one of its options. Useful when you want to match @@ -1387,7 +1479,7 @@ def _matches( return {} if isinstance(matcher, _InverseOf) else None # Now, evaluate the matcher node itself. - if isinstance(matcher, OneOf): + if isinstance(matcher, (OneOf, TypeOf)): for matcher in matcher.options: node_capture = _node_matches(node, matcher, metadata_lookup) if node_capture is not None: diff --git a/libcst/matchers/tests/test_matchers.py b/libcst/matchers/tests/test_matchers.py index ab1e5cf1..11d6b5f5 100644 --- a/libcst/matchers/tests/test_matchers.py +++ b/libcst/matchers/tests/test_matchers.py @@ -3,6 +3,8 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
+import dataclasses + import libcst as cst import libcst.matchers as m from libcst.matchers import matches @@ -202,6 +204,88 @@ class MatchersMatcherTest(UnitTest): ) ) + def test_type_of_matcher_true(self) -> None: + self.assertTrue(matches(cst.Name("true"), m.TypeOf(m.Name))) + self.assertTrue(matches(cst.Name("true"), m.TypeOf(m.Name)(value="true"))) + self.assertTrue(matches(cst.Name("true"), m.Name | m.Float | m.SimpleString)) + self.assertTrue( + matches(cst.SimpleString("'foo'"), m.TypeOf(m.Name, m.SimpleString)) + ) + self.assertTrue( + matches( + cst.SimpleString("'foo'"), + m.TypeOf(m.Name, m.SimpleString)(value="'foo'"), + ) + ) + with self.assertRaises(Exception): + # pyre-ignore + m.TypeOf(cst.Float)(value=1.0) | cst.Name + + with self.assertRaises(TypeError): + # pyre-ignore + m.TypeOf(cst.Float) & cst.SimpleString + + for case in ( + cst.BinaryOperation( + left=cst.Name("foo"), operator=cst.Add(), right=cst.Name("bar") + ), + cst.BooleanOperation( + left=cst.Name("foo"), operator=cst.Or(), right=cst.Name("bar") + ), + ): + self.assertTrue( + matches( + case, (m.BinaryOperation | m.BooleanOperation)(left=m.Name("foo")) + ) + ) + new_case = dataclasses.replace(case, left=case.right, right=case.left) + self.assertTrue( + matches( + new_case, + ~(m.BinaryOperation | m.BooleanOperation)(left=m.Name("foo")), + ) + ) + + def test_type_of_matcher_false(self) -> None: + self.assertFalse(matches(cst.Name("true"), m.TypeOf(m.SimpleString))) + self.assertFalse(matches(cst.Name("true"), m.TypeOf(m.Name)(value="false"))) + self.assertFalse( + matches(cst.Name("true"), m.TypeOf(m.SimpleString)(value="true")) + ) + self.assertFalse( + matches(cst.SimpleString("'foo'"), m.TypeOf(m.Name, m.Attribute)) + ) + self.assertFalse( + matches( + cst.SimpleString("'foo'"), m.TypeOf(m.Name, m.Attribute)(value="'foo'") + ) + ) + self.assertFalse( + matches( + cst.SimpleString("'foo'"), + m.TypeOf(m.Name, m.SimpleString)(value="'bar'"), + ) + ) + + for case in ( + cst.BinaryOperation( + left=cst.Name("foo"), operator=cst.Add(), right=cst.Name("bar") + ), + cst.BooleanOperation( + left=cst.Name("foo"), operator=cst.Or(), right=cst.Name("bar") + ), + ): + self.assertFalse( + matches( + case, (m.BinaryOperation | m.BooleanOperation)(left=m.Name("bar")) + ) + ) + self.assertFalse( + matches( + case, ~(m.BinaryOperation | m.BooleanOperation)(left=m.Name("foo")) + ) + ) + def test_or_matcher_true(self) -> None: # Match on either True or False identifier. self.assertTrue( From 0a4d1e4985a2d82e214062a2ec59b31d4d776f7b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 24 Sep 2020 22:31:39 +0100 Subject: [PATCH 069/632] Removing an import alias with a trailing standalone comment should preserve the comment (#392) * failing test case * implement fix * fix linters --- libcst/codemod/visitors/_remove_imports.py | 95 +++++++++++++++---- .../visitors/tests/test_remove_imports.py | 89 +++++++++++++++++ 2 files changed, 165 insertions(+), 19 deletions(-) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 67e42fd7..629fc021 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -from typing import Dict, List, Optional, Sequence, Set, Tuple, Union +from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union import libcst as cst from libcst.codemod._context import CodemodContext @@ -337,24 +337,15 @@ class RemoveImportsVisitor(ContextAwareTransformer): ] return updated_node.with_changes(names=names_to_keep) - def leave_ImportFrom( - self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom - ) -> Union[cst.ImportFrom, cst.RemovalSentinel]: - names = original_node.names - if isinstance(names, cst.ImportStar): - # This is a star import, so we won't remove it. - return updated_node - - # Make sure we actually know the absolute module. - module_name = get_absolute_module_for_import( - self.context.full_module_name, updated_node - ) - if module_name is None or module_name not in self.unused_obj_imports: - # This node isn't on our list of todos, so let's bail. - return updated_node - objects_to_remove = self.unused_obj_imports[module_name] - + def _process_importfrom_aliases( + self, + updated_node: cst.ImportFrom, + names: Iterable[cst.ImportAlias], + module_name: str, + ) -> Dict[str, Any]: + updates = {} names_to_keep = [] + objects_to_remove = self.unused_obj_imports[module_name] for import_alias in names: # Figure out if it is in our list of things to kill for name, alias in objects_to_remove: @@ -374,6 +365,56 @@ class RemoveImportsVisitor(ContextAwareTransformer): names_to_keep.append(import_alias) continue + # We are about to remove `import_alias`. Check if there are any + # trailing comments and reparent them to the previous import. + # We only do this in case there's a trailing comma, otherwise the + # entire import statement is going to be removed anyway. + comma = import_alias.comma + if isinstance(comma, cst.Comma): + if len(names_to_keep) != 0: + # there is a previous import alias + prev = names_to_keep[-1] + names_to_keep[-1] = prev.with_deep_changes( + whitespace_after=_merge_whitespace_after( + prev.comma.whitespace_after, + comma.whitespace_after, + ) + ) + else: + # No previous import alias, need to attach comment to `ImportFrom`. + # We can only do this if there was a leftparen on the import + # statement. Otherwise there can't be any standalone comments + # anyway, so it's fine to skip this logic. + lpar = updated_node.lpar + if isinstance(lpar, cst.LeftParen): + updates["lpar"] = lpar.with_changes( + whitespace_after=_merge_whitespace_after( + lpar.whitespace_after, + comma.whitespace_after, + ) + ) + updates["names"] = names_to_keep + return updates + + def leave_ImportFrom( + self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom + ) -> Union[cst.ImportFrom, cst.RemovalSentinel]: + names = original_node.names + if isinstance(names, cst.ImportStar): + # This is a star import, so we won't remove it. + return updated_node + + # Make sure we actually know the absolute module. + module_name = get_absolute_module_for_import( + self.context.full_module_name, updated_node + ) + if module_name is None or module_name not in self.unused_obj_imports: + # This node isn't on our list of todos, so let's bail. 
+ return updated_node + + updates = self._process_importfrom_aliases(updated_node, names, module_name) + names_to_keep = updates["names"] + # no changes if names_to_keep == names: return updated_node @@ -389,4 +430,20 @@ class RemoveImportsVisitor(ContextAwareTransformer): *names_to_keep[:-1], names_to_keep[-1].with_changes(comma=cst.MaybeSentinel.DEFAULT), ] - return updated_node.with_changes(names=names_to_keep) + updates["names"] = names_to_keep + return updated_node.with_changes(**updates) + + +def _merge_whitespace_after( + left: cst.BaseParenthesizableWhitespace, right: cst.BaseParenthesizableWhitespace +) -> cst.BaseParenthesizableWhitespace: + if not isinstance(right, cst.ParenthesizedWhitespace): + return left + if not isinstance(left, cst.ParenthesizedWhitespace): + return right + + return left.with_changes( + empty_lines=tuple( + line for line in right.empty_lines if line.comment is not None + ), + ) diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index d8d0e186..f98aacbb 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -57,6 +57,95 @@ class TestRemoveImportsCodemod(CodemodTest): self.assertCodemod(before, after, [("baz", None, None)]) + def test_remove_fromimport_simple(self) -> None: + before = "from a import b, c" + after = "from a import c" + self.assertCodemod(before, after, [("a", "b", None)]) + + def test_remove_fromimport_keeping_standalone_comment(self) -> None: + before = """ + from foo import ( + bar, + # comment + baz, + ) + from loooong import ( + bar, + # comment + short, + this_stays + ) + from third import ( + # comment + short, + this_stays_too + ) + """ + after = """ + from foo import ( + # comment + baz, + ) + from loooong import ( + this_stays + ) + from third import ( + this_stays_too + ) + """ + self.assertCodemod( + before, + after, + [ + ("foo", "bar", None), + ("loooong", "short", None), + ("loooong", "bar", None), + ("third", "short", None), + ], + ) + + def test_remove_fromimport_keeping_inline_comment(self) -> None: + before = """ + from foo import ( # comment + bar, + # comment2 + baz, + ) + from loooong import ( + bar, + short, # comment + # comment2 + this_stays + ) + from third import ( + short, # comment + this_stays_too # comment2 + ) + """ + after = """ + from foo import ( # comment + # comment2 + baz, + ) + from loooong import ( + # comment2 + this_stays + ) + from third import ( + this_stays_too # comment2 + ) + """ + self.assertCodemod( + before, + after, + [ + ("foo", "bar", None), + ("loooong", "short", None), + ("loooong", "bar", None), + ("third", "short", None), + ], + ) + def test_remove_import_alias_simple(self) -> None: """ Should remove aliased module as import From efe0fdbf31b8f6514b047abcaa2b4e5b279b6350 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 28 Sep 2020 17:28:07 +0100 Subject: [PATCH 070/632] bump version to 0.3.11 (#395) --- CHANGELOG.md | 12 ++++++++++++ libcst/_version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ee575617..afba505c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +# 0.3.11 - 2020-09-29 + +## Added +- Implement TypeOf matcher [#384](https://github.com/Instagram/LibCST/pull/384) + +## Updated +- Update return type of ParentNodeProvider to be CSTNode [#377](https://github.com/Instagram/LibCST/pull/377) +- Add source code links to each class/function 
[#378](https://github.com/Instagram/LibCST/pull/378) + +## Fixed +- Removing an import alias with a trailing standalone comment should preserve the comment [#392](https://github.com/Instagram/LibCST/pull/392) + # 0.3.10 - 2020-09-17 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index 343c1bb2..ba78b6ef 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.10" +LIBCST_VERSION: str = "0.3.11" From 34c1826c180eebad853fc01cb74e506b13386304 Mon Sep 17 00:00:00 2001 From: Caleb Donovick Date: Tue, 29 Sep 2020 09:45:50 -0700 Subject: [PATCH 071/632] Provide STORE for {Class,Function}Def.name in ExpressionContextProvider (#394) * Add failing test cases * mark *Def names as STORE * Update libcst/metadata/expression_context_provider.py Co-authored-by: Jimmy Lai * Fix lint * Visit annotations and params * Fix and extend tests Co-authored-by: Jimmy Lai --- libcst/matchers/tests/test_findall.py | 40 +++++++++++++++++-- .../tests/test_matchers_with_metadata.py | 8 ++-- .../metadata/expression_context_provider.py | 38 ++++++++++++++++++ .../tests/test_expression_context_provider.py | 35 ++++++++++++++++ 4 files changed, 113 insertions(+), 8 deletions(-) diff --git a/libcst/matchers/tests/test_findall.py b/libcst/matchers/tests/test_findall.py index ade99743..95233f53 100644 --- a/libcst/matchers/tests/test_findall.py +++ b/libcst/matchers/tests/test_findall.py @@ -75,7 +75,15 @@ class MatchersFindAllTest(UnitTest): meta.ExpressionContextProvider, meta.ExpressionContext.STORE ), ) - self.assertNodeSequenceEqual(booleans, [cst.Name("a"), cst.Name("b")]) + self.assertNodeSequenceEqual( + booleans, + [ + cst.Name("a"), + cst.Name("b"), + cst.Name("foo"), + cst.Name("bar"), + ], + ) # Test that we can provide an explicit resolver and tree booleans = findall( @@ -85,7 +93,15 @@ class MatchersFindAllTest(UnitTest): ), metadata_resolver=wrapper, ) - self.assertNodeSequenceEqual(booleans, [cst.Name("a"), cst.Name("b")]) + self.assertNodeSequenceEqual( + booleans, + [ + cst.Name("a"), + cst.Name("b"), + cst.Name("foo"), + cst.Name("bar"), + ], + ) # Test that failing to provide metadata leads to no match booleans = findall( @@ -127,7 +143,15 @@ class MatchersFindAllTest(UnitTest): wrapper = meta.MetadataWrapper(module) visitor = TestVisitor() wrapper.visit(visitor) - self.assertNodeSequenceEqual(visitor.results, [cst.Name("a"), cst.Name("b")]) + self.assertNodeSequenceEqual( + visitor.results, + [ + cst.Name("a"), + cst.Name("b"), + cst.Name("foo"), + cst.Name("bar"), + ], + ) def test_findall_with_transformers(self) -> None: # Find all assignments in a tree @@ -160,7 +184,15 @@ class MatchersFindAllTest(UnitTest): wrapper = meta.MetadataWrapper(module) visitor = TestTransformer() wrapper.visit(visitor) - self.assertNodeSequenceEqual(visitor.results, [cst.Name("a"), cst.Name("b")]) + self.assertNodeSequenceEqual( + visitor.results, + [ + cst.Name("a"), + cst.Name("b"), + cst.Name("foo"), + cst.Name("bar"), + ], + ) class MatchersExtractAllTest(UnitTest): diff --git a/libcst/matchers/tests/test_matchers_with_metadata.py b/libcst/matchers/tests/test_matchers_with_metadata.py index fcc86f66..a41913d9 100644 --- a/libcst/matchers/tests/test_matchers_with_metadata.py +++ b/libcst/matchers/tests/test_matchers_with_metadata.py @@ -492,7 +492,7 @@ class MatchersVisitorMetadataTest(UnitTest): visitor = TestVisitor() module.visit(visitor) - self.assertEqual(visitor.match_names, {"a", "b", "c"}) + 
self.assertEqual(visitor.match_names, {"a", "b", "c", "foo", "bar"}) def test_matches_on_transformers(self) -> None: # Set up a simple visitor that has a metadata dependency, try to use it in matchers. @@ -533,7 +533,7 @@ class MatchersVisitorMetadataTest(UnitTest): visitor = TestTransformer() module.visit(visitor) - self.assertEqual(visitor.match_names, {"a", "b", "c"}) + self.assertEqual(visitor.match_names, {"a", "b", "c", "foo", "bar"}) def test_matches_decorator_on_visitors(self) -> None: # Set up a simple visitor that has a metadata dependency, try to use it in matchers. @@ -573,7 +573,7 @@ class MatchersVisitorMetadataTest(UnitTest): visitor = TestVisitor() module.visit(visitor) - self.assertEqual(visitor.match_names, {"a", "b", "c"}) + self.assertEqual(visitor.match_names, {"a", "b", "c", "foo", "bar"}) def test_matches_decorator_on_transformers(self) -> None: # Set up a simple visitor that has a metadata dependency, try to use it in matchers. @@ -613,4 +613,4 @@ class MatchersVisitorMetadataTest(UnitTest): visitor = TestTransformer() module.visit(visitor) - self.assertEqual(visitor.match_names, {"a", "b", "c"}) + self.assertEqual(visitor.match_names, {"a", "b", "c", "foo", "bar"}) diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index b6ee1a35..b06ba113 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -155,6 +155,44 @@ class ExpressionContextVisitor(cst.CSTVisitor): def visit_StarredElement(self, node: cst.StarredElement) -> Optional[bool]: self.provider.set_metadata(node, self.context) + def visit_ClassDef(self, node: cst.ClassDef) -> Optional[bool]: + node.name.visit( + ExpressionContextVisitor(self.provider, ExpressionContext.STORE) + ) + node.body.visit(self) + for base in node.bases: + base.visit(self) + for keyword in node.keywords: + keyword.visit(self) + for decorator in node.decorators: + decorator.visit(self) + return False + + def visit_FunctionDef(self, node: cst.FunctionDef) -> Optional[bool]: + node.name.visit( + ExpressionContextVisitor(self.provider, ExpressionContext.STORE) + ) + node.params.visit(self) + node.body.visit(self) + for decorator in node.decorators: + decorator.visit(self) + returns = node.returns + if returns: + returns.visit(self) + return False + + def visit_Param(self, node: cst.Param) -> Optional[bool]: + node.name.visit( + ExpressionContextVisitor(self.provider, ExpressionContext.STORE) + ) + annotation = node.annotation + if annotation: + annotation.visit(self) + default = node.default + if default: + default.visit(self) + return False + class ExpressionContextProvider(BatchableMetadataProvider[Optional[ExpressionContext]]): """ diff --git a/libcst/metadata/tests/test_expression_context_provider.py b/libcst/metadata/tests/test_expression_context_provider.py index 439e6e3c..25cc1d0d 100644 --- a/libcst/metadata/tests/test_expression_context_provider.py +++ b/libcst/metadata/tests/test_expression_context_provider.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. 
+from textwrap import dedent from typing import Dict, Optional, cast import libcst as cst @@ -376,3 +377,37 @@ class ExpressionContextProviderTest(UnitTest): }, ) ) + + def test_class(self) -> None: + code = """ + class Foo(Bar): + x = y + """ + wrapper = MetadataWrapper(parse_module(dedent(code))) + wrapper.visit( + DependentVisitor( + test=self, + name_to_context={ + "Foo": ExpressionContext.STORE, + "Bar": ExpressionContext.LOAD, + "x": ExpressionContext.STORE, + "y": ExpressionContext.LOAD, + }, + ) + ) + + def test_function(self) -> None: + code = """def foo(x: int = y) -> None: pass""" + wrapper = MetadataWrapper(parse_module(code)) + wrapper.visit( + DependentVisitor( + test=self, + name_to_context={ + "foo": ExpressionContext.STORE, + "x": ExpressionContext.STORE, + "int": ExpressionContext.LOAD, + "y": ExpressionContext.LOAD, + "None": ExpressionContext.LOAD, + }, + ) + ) From 1d22a29d18afeda1eb878f3e905f91ddbe6af9df Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 1 Oct 2020 10:50:04 +0100 Subject: [PATCH 072/632] fix RemoveImportsVisitor crash when ImportAlias is inserted without comma (#397) The comment preserving logic introduced in #392 assumed that in an ImportFrom node, ImportAliases have a comma property (except for the last one). That's only true if the ImportFrom node is parsed from actual source, but isn't necessarily true if it's constructed manually. --- libcst/codemod/visitors/_remove_imports.py | 18 ++++++++---- .../visitors/tests/test_remove_imports.py | 28 ++++++++++++++++++- 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 629fc021..9d3b6902 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -374,12 +374,20 @@ class RemoveImportsVisitor(ContextAwareTransformer): if len(names_to_keep) != 0: # there is a previous import alias prev = names_to_keep[-1] - names_to_keep[-1] = prev.with_deep_changes( - whitespace_after=_merge_whitespace_after( - prev.comma.whitespace_after, - comma.whitespace_after, + if isinstance(prev.comma, cst.Comma): + prev = prev.with_deep_changes( + prev.comma, + whitespace_after=_merge_whitespace_after( + prev.comma.whitespace_after, + comma.whitespace_after, + ), ) - ) + else: + # The previous alias didn't have a trailing comma. This can + # occur if the alias was generated, instead of being parsed + # from source. + prev = prev.with_changes(comma=comma) + names_to_keep[-1] = prev else: # No previous import alias, need to attach comment to `ImportFrom`. 
# We can only do this if there was a leftparen on the import diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index f98aacbb..564cf21e 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -6,7 +6,7 @@ import libcst as cst import libcst.matchers as m from libcst.codemod import CodemodContext, CodemodTest, VisitorBasedCodemodCommand -from libcst.codemod.visitors import RemoveImportsVisitor +from libcst.codemod.visitors import AddImportsVisitor, RemoveImportsVisitor from libcst.metadata import ( QualifiedName, QualifiedNameProvider, @@ -80,6 +80,12 @@ class TestRemoveImportsCodemod(CodemodTest): short, this_stays_too ) + from fourth import ( + a, + # comment + b, + c + ) """ after = """ from foo import ( @@ -92,6 +98,10 @@ class TestRemoveImportsCodemod(CodemodTest): from third import ( this_stays_too ) + from fourth import ( + a, + c + ) """ self.assertCodemod( before, @@ -101,6 +111,7 @@ class TestRemoveImportsCodemod(CodemodTest): ("loooong", "short", None), ("loooong", "bar", None), ("third", "short", None), + ("fourth", "b", None), ], ) @@ -887,6 +898,21 @@ class TestRemoveImportsCodemod(CodemodTest): RemoveImportTransformer(CodemodContext()).transform_module(module).code, ) + def test_remove_import_alias_after_inserting(self) -> None: + before = "from foo import bar, baz" + after = "from foo import quux, baz" + + class AddRemoveTransformer(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + AddImportsVisitor.add_needed_import(self.context, "foo", "quux") + RemoveImportsVisitor.remove_unused_import(self.context, "foo", "bar") + + module = cst.parse_module(self.make_fixture_data(before)) + self.assertCodeEqual( + AddRemoveTransformer(CodemodContext()).transform_module(module).code, + after, + ) + def test_remove_comma(self) -> None: """ Trailing commas should be removed if and only if the last alias is removed. From 10d64510673997ef66bb123e6839852eb09111cb Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 1 Oct 2020 15:49:02 +0100 Subject: [PATCH 073/632] bump version to 0.3.12 (#398) --- CHANGELOG.md | 6 ++++++ libcst/_version.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index afba505c..9ebcdb47 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +# 0.3.12 - 2020-10-01 + +## Fixed +- fix RemoveImportsVisitor crash when ImportAlias is inserted without comma [#397](https://github.com/Instagram/LibCST/pull/397) +- Provide STORE for {Class,Function}Def.name in ExpressionContextProvider [#394](https://github.com/Instagram/LibCST/pull/394) + # 0.3.11 - 2020-09-29 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index ba78b6ef..d1a3ae31 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. 
-LIBCST_VERSION: str = "0.3.11" +LIBCST_VERSION: str = "0.3.12" From 6731aa5d2940ce82dd221963b64a496955bfd07a Mon Sep 17 00:00:00 2001 From: Caleb Donovick Date: Thu, 1 Oct 2020 13:45:41 -0700 Subject: [PATCH 074/632] Use correct type for AugAssign and AnnAssign target (#396) * Use correct type * Add tests * Suppress intentional type errors in pyre --- libcst/_nodes/statement.py | 4 +- libcst/_nodes/tests/base.py | 6 +++ libcst/_nodes/tests/test_assign.py | 72 ++++++++++++++++++++++++++++++ libcst/matchers/__init__.py | 32 ++++++------- 4 files changed, 96 insertions(+), 18 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index aecbc43c..6a831b85 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -1332,7 +1332,7 @@ class AnnAssign(BaseSmallStatement): """ #: The target that is being annotated and possibly assigned to. - target: BaseExpression + target: BaseAssignTargetExpression #: The annotation for the target. annotation: Annotation @@ -1393,7 +1393,7 @@ class AugAssign(BaseSmallStatement): """ #: Target that is being operated on and assigned to. - target: BaseExpression + target: BaseAssignTargetExpression #: The augmented assignment operation being performed. operator: BaseAugOp diff --git a/libcst/_nodes/tests/base.py b/libcst/_nodes/tests/base.py index 1a014442..ed3b2b10 100644 --- a/libcst/_nodes/tests/base.py +++ b/libcst/_nodes/tests/base.py @@ -95,6 +95,12 @@ class CSTNodeTest(UnitTest): with self.assertRaisesRegex(cst.CSTValidationError, expected_re): get_node() + def assert_invalid_types( + self, get_node: Callable[[], cst.CSTNode], expected_re: str + ) -> None: + with self.assertRaisesRegex(TypeError, expected_re): + get_node().validate_types_shallow() + def __assert_codegen( self, node: cst.CSTNode, diff --git a/libcst/_nodes/tests/test_assign.py b/libcst/_nodes/tests/test_assign.py index aad6f979..7b107d94 100644 --- a/libcst/_nodes/tests/test_assign.py +++ b/libcst/_nodes/tests/test_assign.py @@ -119,6 +119,29 @@ class AssignTest(CSTNodeTest): def test_invalid(self, **kwargs: Any) -> None: self.assert_invalid(**kwargs) + @data_provider( + ( + { + "get_node": ( + lambda: cst.Assign( + # pyre-ignore: Incompatible parameter type [6] + targets=[ + cst.BinaryOperation( + left=cst.Name("x"), + operator=cst.Add(), + right=cst.Integer("1"), + ), + ], + value=cst.Name("y"), + ) + ), + "expected_re": "Expected an instance of .*statement.AssignTarget.*", + }, + ) + ) + def test_invalid_types(self, **kwargs: Any) -> None: + self.assert_invalid_types(**kwargs) + class AnnAssignTest(CSTNodeTest): @data_provider( @@ -284,6 +307,31 @@ class AnnAssignTest(CSTNodeTest): def test_invalid(self, **kwargs: Any) -> None: self.assert_invalid(**kwargs) + @data_provider( + ( + { + "get_node": ( + lambda: cst.AnnAssign( + # pyre-ignore: Incompatible parameter type [6] + target=cst.BinaryOperation( + left=cst.Name("x"), + operator=cst.Add(), + right=cst.Integer("1"), + ), + annotation=cst.Annotation(cst.Name("int")), + equal=cst.AssignEqual(), + value=cst.Name("y"), + ) + ), + "expected_re": ( + "Expected an instance of .*BaseAssignTargetExpression.*" + ), + }, + ) + ) + def test_invalid_types(self, **kwargs: Any) -> None: + self.assert_invalid_types(**kwargs) + class AugAssignTest(CSTNodeTest): @data_provider( @@ -362,3 +410,27 @@ class AugAssignTest(CSTNodeTest): ) def test_valid(self, **kwargs: Any) -> None: self.validate_node(**kwargs) + + @data_provider( + ( + { + "get_node": ( + lambda: cst.AugAssign( + # pyre-ignore: Incompatible 
parameter type [6] + target=cst.BinaryOperation( + left=cst.Name("x"), + operator=cst.Add(), + right=cst.Integer("1"), + ), + operator=cst.Add(), + value=cst.Name("y"), + ) + ), + "expected_re": ( + "Expected an instance of .*BaseAssignTargetExpression.*" + ), + }, + ) + ) + def test_invalid_types(self, **kwargs: Any) -> None: + self.assert_invalid_types(**kwargs) diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 3b2d9077..73b3e7f2 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -223,10 +223,10 @@ class And(BaseBooleanOp, BaseMatcherNode): ] = DoNotCare() -BaseExpressionMatchType = Union[ - "BaseExpression", +BaseAssignTargetExpressionMatchType = Union[ + "BaseAssignTargetExpression", MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseExpression], bool]], + MatchIfTrue[Callable[[cst.BaseAssignTargetExpression], bool]], ] AnnotationMatchType = Union[ "Annotation", MetadataMatchType, MatchIfTrue[Callable[[cst.Annotation], bool]] @@ -248,10 +248,10 @@ SemicolonMatchType = Union[ @dataclass(frozen=True, eq=False, unsafe_hash=False) class AnnAssign(BaseSmallStatement, BaseMatcherNode): target: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = DoNotCare() annotation: Union[ AnnotationMatchType, @@ -285,6 +285,13 @@ class AnnAssign(BaseSmallStatement, BaseMatcherNode): ] = DoNotCare() +BaseExpressionMatchType = Union[ + "BaseExpression", + MetadataMatchType, + MatchIfTrue[Callable[[cst.BaseExpression], bool]], +] + + @dataclass(frozen=True, eq=False, unsafe_hash=False) class Annotation(BaseMatcherNode): annotation: Union[ @@ -597,13 +604,6 @@ class AssignEqual(BaseMatcherNode): ] = DoNotCare() -BaseAssignTargetExpressionMatchType = Union[ - "BaseAssignTargetExpression", - MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseAssignTargetExpression], bool]], -] - - @dataclass(frozen=True, eq=False, unsafe_hash=False) class AssignTarget(BaseMatcherNode): target: Union[ @@ -852,10 +852,10 @@ BaseAugOpMatchType = Union[ @dataclass(frozen=True, eq=False, unsafe_hash=False) class AugAssign(BaseSmallStatement, BaseMatcherNode): target: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = DoNotCare() operator: Union[ BaseAugOpMatchType, From 21d37b94b253876511a8e0be65b9088129c1f787 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Mon, 12 Oct 2020 11:12:28 -0700 Subject: [PATCH 075/632] Support string annotations for type aliases (#401) --- libcst/_parser/types/tests/test_config.py | 2 +- libcst/metadata/scope_provider.py | 26 ++++++++++++-------- libcst/metadata/tests/test_scope_provider.py | 18 ++++++++++++-- libcst/tests/test_type_enforce.py | 1 + 4 files changed, 34 insertions(+), 13 deletions(-) diff --git a/libcst/_parser/types/tests/test_config.py b/libcst/_parser/types/tests/test_config.py index 6c0c0d0b..8b68bd18 100644 --- a/libcst/_parser/types/tests/test_config.py +++ b/libcst/_parser/types/tests/test_config.py @@ -12,7 +12,7 @@ from libcst.testing.utils import UnitTest, data_provider class TestConfig(UnitTest): @data_provider( { - "empty": (lambda: PartialParserConfig(),), + "empty": 
(PartialParserConfig,), "python_version_a": (lambda: PartialParserConfig(python_version="3.7"),), "python_version_b": (lambda: PartialParserConfig(python_version="3.7.1"),), "encoding": (lambda: PartialParserConfig(encoding="latin-1"),), diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index e1c7f196..f0cde1dc 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -643,7 +643,10 @@ class ScopeVisitor(cst.CSTVisitor): self.scope: Scope = GlobalScope() self.__deferred_accesses: List[Tuple[Access, Optional[cst.Attribute]]] = [] self.__top_level_attribute_stack: List[Optional[cst.Attribute]] = [None] - self.__in_annotation: Set[Union[cst.Call, cst.Annotation]] = set() + self.__in_annotation: Set[ + Union[cst.Call, cst.Annotation, cst.Subscript] + ] = set() + self.__in_ignored_subscript: Set[cst.Subscript] = set() @contextmanager def _new_scope( @@ -699,10 +702,8 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Call(self, node: cst.Call) -> Optional[bool]: self.__top_level_attribute_stack.append(None) - if any( - qn.name == "typing.TypeVar" - for qn in self.scope.get_qualified_names_for(node) - ): + qnames = self.scope.get_qualified_names_for(node) + if any(qn.name in {"typing.NewType", "typing.TypeVar"} for qn in qnames): node.func.visit(self) self.__in_annotation.add(node) for arg in node.args[1:]: @@ -731,21 +732,26 @@ class ScopeVisitor(cst.CSTVisitor): def _handle_string_annotation( self, node: Union[cst.SimpleString, cst.ConcatenatedString] ) -> None: - if self.__in_annotation: + if self.__in_annotation and not self.__in_ignored_subscript: value = node.evaluated_value if value: mod = cst.parse_module(value) mod.visit(self) def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: + qnames = self.scope.get_qualified_names_for(node.value) + if any(qn.name.startswith(("typing.", "typing_extensions.")) for qn in qnames): + self.__in_annotation.add(node) if any( - qn.name in ("typing.Literal", "typing_extensions.Literal") - for qn in self.scope.get_qualified_names_for(node.value) + qn.name in {"typing.Literal", "typing_extensions.Literal"} for qn in qnames ): - node.value.visit(self) - return False + self.__in_ignored_subscript.add(node) return True + def leave_Subscript(self, original_node: cst.Subscript) -> None: + self.__in_annotation.discard(original_node) + self.__in_ignored_subscript.discard(original_node) + def visit_Name(self, node: cst.Name) -> Optional[bool]: # not all Name have ExpressionContext context = self.provider.get_metadata(ExpressionContextProvider, node, None) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index d6337f3a..228fb276 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1018,8 +1018,8 @@ class ScopeProviderTest(UnitTest): def test_annotation_access(self) -> None: m, scopes = get_scope_metadata_provider( """ - from typing import Literal, TypeVar - from a import A, B, C, D, E, F + from typing import Literal, NewType, Optional, TypeVar + from a import A, B, C, D, E, F, G, H def x(a: A): pass def y(b: "B"): @@ -1029,6 +1029,8 @@ class ScopeProviderTest(UnitTest): DType = TypeVar("DType", bound=D) EType = TypeVar("EType", bound="E") FType = TypeVar("F") + GType = NewType("GType", "Optional[G]") + HType = Optional["H"] """ ) imp = ensure_type( @@ -1068,6 +1070,18 @@ class ScopeProviderTest(UnitTest): self.assertIsInstance(assignment, Assignment) 
self.assertEqual(len(assignment.references), 0) + assignment = list(scope["G"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) + + assignment = list(scope["H"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) + def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( """ diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index 0779ec37..edc283e5 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -53,6 +53,7 @@ class MyExampleClassWithMetaclass(metaclass=MyExampleMetaclass): pass +# lint-ignore: NoNamedTupleRule class NamedTupleSubclass(NamedTuple): a: str b: int From 01c8098965144feddaa34c1c9f58c92ce8265541 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Mon, 12 Oct 2020 14:04:44 -0700 Subject: [PATCH 076/632] Bump to version 0.3.13 (#403) --- CHANGELOG.md | 6 ++++++ libcst/_version.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ebcdb47..e15903ba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +# 0.3.13 - 2020-10-12 + +## Fixed +- Use correct type for AugAssign and AnnAssign target [#396](https://github.com/Instagram/LibCST/pull/396) +- Support string annotations for type aliases [#401](https://github.com/Instagram/LibCST/pull/401) + # 0.3.12 - 2020-10-01 ## Fixed diff --git a/libcst/_version.py b/libcst/_version.py index d1a3ae31..e00f3a61 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. 
-LIBCST_VERSION: str = "0.3.12" +LIBCST_VERSION: str = "0.3.13" From a1b1ae45d53099880e4117616d4a5733a78bc3d6 Mon Sep 17 00:00:00 2001 From: luciawlli <60454589+luciawlli@users.noreply.github.com> Date: Wed, 28 Oct 2020 04:48:06 -0700 Subject: [PATCH 077/632] Add Access.is_type_hint for types used in classdef base and assignment values (#406) --- libcst/metadata/scope_provider.py | 36 ++++++++++++++++---- libcst/metadata/tests/test_scope_provider.py | 32 +++++++++++++---- 2 files changed, 55 insertions(+), 13 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index f0cde1dc..3c009dc2 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -65,12 +65,17 @@ class Access: is_annotation: bool + is_type_hint: bool + __assignments: Set["BaseAssignment"] - def __init__(self, node: cst.Name, scope: "Scope", is_annotation: bool) -> None: + def __init__( + self, node: cst.Name, scope: "Scope", is_annotation: bool, is_type_hint: bool + ) -> None: self.node = node self.scope = scope self.is_annotation = is_annotation + self.is_type_hint = is_type_hint self.__assignments = set() def __hash__(self) -> int: @@ -646,7 +651,9 @@ class ScopeVisitor(cst.CSTVisitor): self.__in_annotation: Set[ Union[cst.Call, cst.Annotation, cst.Subscript] ] = set() + self.__in_type_hint: Set[Union[cst.Call, cst.Annotation, cst.Subscript]] = set() self.__in_ignored_subscript: Set[cst.Subscript] = set() + self.__ignore_annotation: int = 0 @contextmanager def _new_scope( @@ -705,7 +712,7 @@ class ScopeVisitor(cst.CSTVisitor): qnames = self.scope.get_qualified_names_for(node) if any(qn.name in {"typing.NewType", "typing.TypeVar"} for qn in qnames): node.func.visit(self) - self.__in_annotation.add(node) + self.__in_type_hint.add(node) for arg in node.args[1:]: arg.visit(self) return False @@ -713,7 +720,7 @@ class ScopeVisitor(cst.CSTVisitor): def leave_Call(self, original_node: cst.Call) -> None: self.__top_level_attribute_stack.pop() - self.__in_annotation.discard(original_node) + self.__in_type_hint.discard(original_node) def visit_Annotation(self, node: cst.Annotation) -> Optional[bool]: self.__in_annotation.add(node) @@ -732,7 +739,9 @@ class ScopeVisitor(cst.CSTVisitor): def _handle_string_annotation( self, node: Union[cst.SimpleString, cst.ConcatenatedString] ) -> None: - if self.__in_annotation and not self.__in_ignored_subscript: + if ( + self.__in_type_hint or self.__in_annotation + ) and not self.__in_ignored_subscript: value = node.evaluated_value if value: mod = cst.parse_module(value) @@ -741,7 +750,7 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: qnames = self.scope.get_qualified_names_for(node.value) if any(qn.name.startswith(("typing.", "typing_extensions.")) for qn in qnames): - self.__in_annotation.add(node) + self.__in_type_hint.add(node) if any( qn.name in {"typing.Literal", "typing_extensions.Literal"} for qn in qnames ): @@ -749,7 +758,7 @@ class ScopeVisitor(cst.CSTVisitor): return True def leave_Subscript(self, original_node: cst.Subscript) -> None: - self.__in_annotation.discard(original_node) + self.__in_type_hint.discard(original_node) self.__in_ignored_subscript.discard(original_node) def visit_Name(self, node: cst.Name) -> Optional[bool]: @@ -758,7 +767,14 @@ class ScopeVisitor(cst.CSTVisitor): if context == ExpressionContext.STORE: self.scope.record_assignment(node.value, node) elif context in (ExpressionContext.LOAD, ExpressionContext.DEL, None): - access = 
Access(node, self.scope, is_annotation=bool(self.__in_annotation)) + access = Access( + node, + self.scope, + is_annotation=bool( + self.__in_annotation and not self.__ignore_annotation + ), + is_type_hint=bool(self.__in_type_hint), + ) self.__deferred_accesses.append( (access, self.__top_level_attribute_stack[-1]) ) @@ -817,6 +833,12 @@ class ScopeVisitor(cst.CSTVisitor): statement.visit(self) return False + def visit_ClassDef_bases(self, node: cst.ClassDef) -> None: + self.__ignore_annotation += 1 + + def leave_ClassDef_bases(self, node: cst.ClassDef) -> None: + self.__ignore_annotation -= 1 + def visit_Global(self, node: cst.Global) -> Optional[bool]: for name_item in node.names: self.scope.record_global_overwrite(name_item.name.value) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 228fb276..ecf59fa1 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1018,8 +1018,8 @@ class ScopeProviderTest(UnitTest): def test_annotation_access(self) -> None: m, scopes = get_scope_metadata_provider( """ - from typing import Literal, NewType, Optional, TypeVar - from a import A, B, C, D, E, F, G, H + from typing import Literal, NewType, Optional, TypeVar, Callable + from a import A, B, C, D, E, F, G, H, I, J def x(a: A): pass def y(b: "B"): @@ -1031,6 +1031,10 @@ class ScopeProviderTest(UnitTest): FType = TypeVar("F") GType = NewType("GType", "Optional[G]") HType = Optional["H"] + IType = Callable[..., I] + + class Test(Generic[J]): + pass """ ) imp = ensure_type( @@ -1058,13 +1062,15 @@ class ScopeProviderTest(UnitTest): self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) - self.assertTrue(references[0].is_annotation) + self.assertFalse(references[0].is_annotation) + self.assertTrue(references[0].is_type_hint) assignment = list(scope["E"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) - self.assertTrue(references[0].is_annotation) + self.assertFalse(references[0].is_annotation) + self.assertTrue(references[0].is_type_hint) assignment = list(scope["F"])[0] self.assertIsInstance(assignment, Assignment) @@ -1074,13 +1080,27 @@ class ScopeProviderTest(UnitTest): self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) - self.assertTrue(references[0].is_annotation) + self.assertFalse(references[0].is_annotation) + self.assertTrue(references[0].is_type_hint) assignment = list(scope["H"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) - self.assertTrue(references[0].is_annotation) + self.assertFalse(references[0].is_annotation) + self.assertTrue(references[0].is_type_hint) + + assignment = list(scope["I"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertFalse(references[0].is_annotation) + + assignment = list(scope["J"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertFalse(references[0].is_annotation) def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( From a5de9e40a033eb35d86c9cf05137e9f367a95e0a Mon Sep 
17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 3 Nov 2020 18:28:05 +0000 Subject: [PATCH 078/632] Pin linters (#412) * pin linter versions * fix lint errors --- libcst/codemod/_cli.py | 4 ++-- libcst/codemod/commands/convert_format_to_fstring.py | 2 +- libcst/codemod/visitors/_apply_type_annotations.py | 8 +++++--- libcst/helpers/_statement.py | 2 +- requirements-dev.txt | 6 +++--- 5 files changed, 12 insertions(+), 10 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index d4ced900..cb9c8529 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -590,7 +590,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 progress = Progress(enabled=not hide_progress, total=total) # Grab number of cores if we need to - jobs: int = jobs or cpu_count() + jobs: int = jobs if jobs is not None else cpu_count() if jobs < 1: raise Exception("Must have at least one job to process!") @@ -598,7 +598,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 if total == 0: return ParallelTransformResult(successes=0, failures=0, skips=0, warnings=0) - if repo_root: + if repo_root is not None: # Make sure if there is a root that we have the absolute path to it. repo_root = os.path.abspath(repo_root) # Spin up a full repo metadata manager so that we can provide metadata diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index 39b2f96c..29e81246 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -295,7 +295,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): spec_format_spec, spec_conversion, ) in format_spec_tokens: - if spec_format_spec: + if spec_format_spec is not None: # This shouldn't be possible, we don't allow it in the spec! raise Exception("Logic error!") if spec_literal_text: diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 77beac8b..2090c151 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -20,7 +20,7 @@ def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[st import_names = set() for imported_name in import_aliases: asname = imported_name.asname - if asname: + if asname is not None: import_names.add(get_full_name_for_node(asname.name)) else: import_names.add(get_full_name_for_node(imported_name.name)) @@ -242,7 +242,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): super().__init__(context) # Qualifier for storing the canonical name of the current function. 
self.qualifier: List[str] = [] - self.annotations: Annotations = annotations or Annotations() + self.annotations: Annotations = ( + Annotations() if annotations is None else annotations + ) self.toplevel_annotations: Dict[str, cst.Annotation] = {} self.visited_classes: Set[str] = set() self.overwrite_existing_annotations = overwrite_existing_annotations @@ -297,7 +299,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): context_contents = self.context.scratch.get( ApplyTypeAnnotationsVisitor.CONTEXT_KEY ) - if context_contents: + if context_contents is not None: stub, overwrite_existing_annotations = context_contents self.overwrite_existing_annotations = ( self.overwrite_existing_annotations or overwrite_existing_annotations diff --git a/libcst/helpers/_statement.py b/libcst/helpers/_statement.py index 4a3825f2..a9431b44 100644 --- a/libcst/helpers/_statement.py +++ b/libcst/helpers/_statement.py @@ -31,7 +31,7 @@ def get_absolute_module_for_import( return None base_module = ".".join(modules[:-num_dots]) # Finally, if the module name was supplied, append it to the end. - if module_name: + if module_name is not None: # If we went all the way to the top, the base module should be empty, so we # should return the relative bit as absolute. Otherwise, combine the base # module and module name using a dot separator. diff --git a/requirements-dev.txt b/requirements-dev.txt index 84fd1d13..2f86e9b3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,12 +1,12 @@ -black>=19.10b0 +black==20.8b1 codecov>=2.1.4 coverage>=4.5.4 -fixit>=0.1.0 +fixit==0.1.1 flake8>=3.7.8 hypothesis>=4.36.0 hypothesmith>=0.0.4 git+https://github.com/jimmylai/sphinx.git@slots_type_annotation -isort>=4.3.20 +isort==5.5.3 jupyter>=1.0.0 nbsphinx>=0.4.2 pyre-check==0.0.41 From dd521f51e204e453ed8e69ef58c264273874d34d Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 6 Nov 2020 10:32:29 +0000 Subject: [PATCH 079/632] [scope] Visit concatenated f-strings (#411) This PR makes sure scope analysis visits nodes under a ConcatenatedString. 
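As a minimal, hypothetical illustration (the module `a` and name `b` are placeholders; this snippet is not part of the patch and simply mirrors the new test case below), the visitor previously returned False from visit_ConcatenatedString without descending into the f-string parts, so the access to `b` was missed. With this change it should be recorded:

```python
import libcst as cst
from libcst.metadata import MetadataWrapper, ScopeProvider

code = 'from a import b\nf"{b}" "hello"\n'
wrapper = MetadataWrapper(cst.parse_module(code))
scopes = {s for s in wrapper.resolve(ScopeProvider).values() if s is not None}
global_scope = next(iter(scopes))  # only the global scope exists in this snippet
# After this change, the access to `b` under the ConcatenatedString is recorded.
assert len(list(global_scope.accesses)) == 1
```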
--- libcst/metadata/scope_provider.py | 8 +++++--- libcst/metadata/tests/test_scope_provider.py | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 3c009dc2..39fb33c5 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -733,12 +733,12 @@ class ScopeVisitor(cst.CSTVisitor): return False def visit_ConcatenatedString(self, node: cst.ConcatenatedString) -> Optional[bool]: - self._handle_string_annotation(node) - return False + return not self._handle_string_annotation(node) def _handle_string_annotation( self, node: Union[cst.SimpleString, cst.ConcatenatedString] - ) -> None: + ) -> bool: + """Returns whether it successfully handled the string annotation""" if ( self.__in_type_hint or self.__in_annotation ) and not self.__in_ignored_subscript: @@ -746,6 +746,8 @@ class ScopeVisitor(cst.CSTVisitor): if value: mod = cst.parse_module(value) mod.visit(self) + return True + return False def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: qnames = self.scope.get_qualified_names_for(node.value) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index ecf59fa1..f04efa05 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -111,6 +111,25 @@ class ScopeProviderTest(UnitTest): wrapper = MetadataWrapper(cst.parse_module("def a():\n from b import c\n\n")) wrapper.visit(DependentVisitor()) + def test_fstring_accesses(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + from a import b + f"{b}" "hello" + """ + ) + global_scope = scopes[m] + self.assertIsInstance(global_scope, GlobalScope) + global_accesses = list(global_scope.accesses) + self.assertEqual(len(global_accesses), 1) + import_node = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.ImportFrom + ) + b_referent = list(global_accesses[0].referents)[0] + self.assertIsInstance(b_referent, Assignment) + if isinstance(b_referent, Assignment): # for the typechecker's eyes + self.assertEqual(b_referent.node, import_node) + @data_provider((("any",), ("True",), ("Exception",), ("__name__",))) def test_builtins(self, builtin: str) -> None: m, scopes = get_scope_metadata_provider( From 7478d738ea4911b51fe8c0758a6a405f7ce9c98a Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Mon, 9 Nov 2020 14:59:36 +0200 Subject: [PATCH 080/632] Codemod CLI multiprocessing simplification (#402) Co-authored-by: Zsolt Dollenstein --- libcst/codemod/_cli.py | 421 +++++++++++++--------------------- libcst/codemod/_dummy_pool.py | 35 +++ 2 files changed, 191 insertions(+), 265 deletions(-) create mode 100644 libcst/codemod/_dummy_pool.py diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index cb9c8529..985684c3 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -15,13 +15,13 @@ import sys import time import traceback from dataclasses import dataclass, replace -from multiprocessing import Process, Queue, cpu_count +from multiprocessing import Pool, cpu_count from pathlib import Path -from queue import Empty -from typing import AnyStr, Dict, List, Optional, Sequence, Set, Union, cast +from typing import Any, AnyStr, Dict, List, Optional, Sequence, Union, cast from libcst import PartialParserConfig, parse_module from libcst.codemod._codemod import Codemod +from libcst.codemod._dummy_pool import DummyPool from libcst.codemod._runner import ( 
SkipFile, SkipReason, @@ -216,7 +216,7 @@ def _calculate_module(repo_root: Optional[str], filename: str) -> Optional[str]: @dataclass(frozen=True) -class ParallelExecResult: +class ExecutionResult: # File we have results for filename: str # Whether we actually changed the code for the file or not @@ -225,50 +225,51 @@ class ParallelExecResult: transform_result: TransformResult -def _parallel_exec_process_stub( # noqa: C901 - result_queue: "Queue[ParallelExecResult]", +@dataclass(frozen=True) +class ExecutionConfig: + blacklist_patterns: Sequence[str] = () + format_code: bool = False + formatter_args: Sequence[str] = () + generated_code_marker: str = _DEFAULT_GENERATED_CODE_MARKER + include_generated: bool = False + python_version: Optional[str] = None + repo_root: Optional[str] = None + unified_diff: Optional[int] = None + + +def _execute_transform( # noqa: C901 transformer: Codemod, filename: str, - repo_root: Optional[str], - unified_diff: Optional[int], - include_generated: bool, - generated_code_marker: str, - format_code: bool, - formatter_args: Sequence[str], - blacklist_patterns: Sequence[str], - python_version: Optional[str], -) -> None: - for pattern in blacklist_patterns: + config: ExecutionConfig, +) -> ExecutionResult: + for pattern in config.blacklist_patterns: if re.fullmatch(pattern, filename): - result_queue.put( - ParallelExecResult( - filename=filename, - changed=False, - transform_result=TransformSkip( - skip_reason=SkipReason.BLACKLISTED, - skip_description=f"Blacklisted by pattern {pattern}.", - ), - ) + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformSkip( + skip_reason=SkipReason.BLACKLISTED, + skip_description=f"Blacklisted by pattern {pattern}.", + ), ) - return try: with open(filename, "rb") as fp: oldcode = fp.read() # Skip generated files - if not include_generated and generated_code_marker.encode("utf-8") in oldcode: - result_queue.put( - ParallelExecResult( - filename=filename, - changed=False, - transform_result=TransformSkip( - skip_reason=SkipReason.GENERATED, - skip_description="Generated file.", - ), - ) + if ( + not config.include_generated + and config.generated_code_marker.encode("utf-8") in oldcode + ): + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformSkip( + skip_reason=SkipReason.GENERATED, + skip_description="Generated file.", + ), ) - return # Somewhat gross hack to provide the filename in the transform's context. 
# We do this after the fork so that a context that was initialized with @@ -277,7 +278,7 @@ def _parallel_exec_process_stub( # noqa: C901 transformer.context = replace( transformer.context, filename=filename, - full_module_name=_calculate_module(repo_root, filename), + full_module_name=_calculate_module(config.repo_root, filename), ) # Run the transform, bail if we failed or if we aren't formatting code @@ -285,8 +286,8 @@ def _parallel_exec_process_stub( # noqa: C901 input_tree = parse_module( oldcode, config=( - PartialParserConfig(python_version=python_version) - if python_version is not None + PartialParserConfig(python_version=str(config.python_version)) + if config.python_version is not None else PartialParserConfig() ), ) @@ -294,28 +295,43 @@ def _parallel_exec_process_stub( # noqa: C901 newcode = output_tree.bytes encoding = output_tree.encoding except KeyboardInterrupt: - result_queue.put( - ParallelExecResult( - filename=filename, changed=False, transform_result=TransformExit() - ) + return ExecutionResult( + filename=filename, changed=False, transform_result=TransformExit() ) - return except SkipFile as ex: - result_queue.put( - ParallelExecResult( + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformSkip( + skip_reason=SkipReason.OTHER, + skip_description=str(ex), + warning_messages=transformer.context.warnings, + ), + ) + except Exception as ex: + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformFailure( + error=ex, + traceback_str=traceback.format_exc(), + warning_messages=transformer.context.warnings, + ), + ) + + # Call formatter if needed, but only if we actually changed something in this + # file + if config.format_code and newcode != oldcode: + try: + newcode = invoke_formatter(config.formatter_args, newcode) + except KeyboardInterrupt: + return ExecutionResult( filename=filename, changed=False, - transform_result=TransformSkip( - skip_reason=SkipReason.OTHER, - skip_description=str(ex), - warning_messages=transformer.context.warnings, - ), + transform_result=TransformExit(), ) - ) - return - except Exception as ex: - result_queue.put( - ParallelExecResult( + except Exception as ex: + return ExecutionResult( filename=filename, changed=False, transform_result=TransformFailure( @@ -324,44 +340,14 @@ def _parallel_exec_process_stub( # noqa: C901 warning_messages=transformer.context.warnings, ), ) - ) - return - - # Call formatter if needed, but only if we actually changed something in this - # file - if format_code and newcode != oldcode: - try: - newcode = invoke_formatter(formatter_args, newcode) - except KeyboardInterrupt: - result_queue.put( - ParallelExecResult( - filename=filename, - changed=False, - transform_result=TransformExit(), - ) - ) - return - except Exception as ex: - result_queue.put( - ParallelExecResult( - filename=filename, - changed=False, - transform_result=TransformFailure( - error=ex, - traceback_str=traceback.format_exc(), - warning_messages=transformer.context.warnings, - ), - ) - ) - return # Format as unified diff if needed, otherwise save it back changed = oldcode != newcode - if unified_diff: + if config.unified_diff: newcode = diff_code( oldcode.decode(encoding), newcode.decode(encoding), - unified_diff, + config.unified_diff, filename=filename, ) else: @@ -373,32 +359,26 @@ def _parallel_exec_process_stub( # noqa: C901 newcode = "" # Inform success - result_queue.put( - ParallelExecResult( - filename=filename, - changed=changed, - 
transform_result=TransformSuccess( - warning_messages=transformer.context.warnings, code=newcode - ), - ) + return ExecutionResult( + filename=filename, + changed=changed, + transform_result=TransformSuccess( + warning_messages=transformer.context.warnings, code=newcode + ), ) except KeyboardInterrupt: - result_queue.put( - ParallelExecResult( - filename=filename, changed=False, transform_result=TransformExit() - ) + return ExecutionResult( + filename=filename, changed=False, transform_result=TransformExit() ) except Exception as ex: - result_queue.put( - ParallelExecResult( - filename=filename, - changed=False, - transform_result=TransformFailure( - error=ex, - traceback_str=traceback.format_exc(), - warning_messages=transformer.context.warnings, - ), - ) + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformFailure( + error=ex, + traceback_str=traceback.format_exc(), + warning_messages=transformer.context.warnings, + ), ) @@ -465,7 +445,7 @@ class Progress: def _print_parallel_result( - exec_result: ParallelExecResult, + exec_result: ExecutionResult, progress: Progress, *, unified_diff: bool, @@ -534,6 +514,13 @@ class ParallelTransformResult: skips: int +# Unfortunate wrapper required since there is no `istarmap_unordered`... +def _execute_transform_wrap( + job: Dict[str, Any], +) -> ExecutionResult: + return _execute_transform(**job) + + def parallel_exec_transform_with_prettyprint( # noqa: C901 transform: Codemod, files: Sequence[str], @@ -616,171 +603,75 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 ) print("Executing codemod...", file=sys.stderr) - # We place results in this queue inside _parallel_exec_process_stub - # so that we can control when things get printed to the console. - queue = Queue() - - if total == 1: - # Simple case, we should not pay for process overhead. Lets still - # use the exec stub however, so we can share code. - progress.print(0) - _parallel_exec_process_stub( - queue, - transform, - files[0], - repo_root, - unified_diff=unified_diff, - include_generated=include_generated, - generated_code_marker=generated_code_marker, - format_code=format_code, - formatter_args=formatter_args, - blacklist_patterns=blacklist_patterns, - python_version=python_version, - ) - result = queue.get() - _print_parallel_result( - result, - progress, - unified_diff=bool(unified_diff), - show_successes=show_successes, - hide_generated=hide_generated, - hide_blacklisted=hide_blacklisted, - ) - if isinstance(result.transform_result, TransformFailure): - return ParallelTransformResult( - successes=0, - failures=1, - skips=0, - warnings=len(result.transform_result.warning_messages), - ) - elif isinstance(result.transform_result, (TransformSkip, TransformExit)): - return ParallelTransformResult( - successes=0, - failures=0, - skips=1, - warnings=len(result.transform_result.warning_messages), - ) - elif isinstance(result.transform_result, TransformSuccess): - return ParallelTransformResult( - successes=1, - failures=0, - skips=0, - warnings=len(result.transform_result.warning_messages), - ) - else: - raise Exception("Logic error, unaccounted for result!") - - # Warm the parser, pre-fork. 
- parse_module( - "", - config=( - PartialParserConfig(python_version=python_version) - if python_version is not None - else PartialParserConfig() - ), + config = ExecutionConfig( + repo_root=repo_root, + unified_diff=unified_diff, + include_generated=include_generated, + generated_code_marker=generated_code_marker, + format_code=format_code, + formatter_args=formatter_args, + blacklist_patterns=blacklist_patterns, + python_version=python_version, ) - # Complex case, more than one file + if total == 1: + # Simple case, we should not pay for process overhead. + # Let's just use a dummy synchronous pool. + jobs = 1 + pool_impl = DummyPool + else: + pool_impl = Pool + # Warm the parser, pre-fork. + parse_module( + "", + config=( + PartialParserConfig(python_version=python_version) + if python_version is not None + else PartialParserConfig() + ), + ) + successes: int = 0 failures: int = 0 warnings: int = 0 skips: int = 0 - pending_processes: List[Process] = [] - - # Start processes - filename_to_process: Dict[str, Process] = {} - for f in files: - process = Process( - target=_parallel_exec_process_stub, - args=( - queue, - transform, - f, - repo_root, - unified_diff, - include_generated, - generated_code_marker, - format_code, - formatter_args, - blacklist_patterns, - python_version, - ), - ) - pending_processes.append(process) - filename_to_process[f] = process - - # Start the processes, allowing no more than num_processes to be running - # at once. - results_left = len(pending_processes) - joinable_processes: Set[Process] = set() - processes_started = 0 - - interrupted = False - while results_left > 0 and not interrupted: - while processes_started < jobs and pending_processes: - try: - # Move this process to the joinables - process = pending_processes.pop(0) - joinable_processes.add(process) - - # Start it, bookkeep that we did - process.start() - processes_started += 1 - except KeyboardInterrupt: - interrupted = True - continue + with pool_impl(processes=jobs) as p: # type: ignore + args = [ + { + "transformer": transform, + "filename": filename, + "config": config, + } + for filename in files + ] try: - result = queue.get(block=True, timeout=0.005) - except KeyboardInterrupt: - interrupted = True - continue - except Empty: - progress.print(successes + failures + skips) - continue + for result in p.imap_unordered(_execute_transform_wrap, args, chunksize=4): + # Print an execution result, keep track of failures + _print_parallel_result( + result, + progress, + unified_diff=bool(unified_diff), + show_successes=show_successes, + hide_generated=hide_generated, + hide_blacklisted=hide_blacklisted, + ) + progress.print(successes + failures + skips) - # Bookkeep the result, since we know the process that returned this is done. 
- results_left -= 1 - processes_started -= 1 + if isinstance(result.transform_result, TransformFailure): + failures += 1 + elif isinstance(result.transform_result, TransformSuccess): + successes += 1 + elif isinstance( + result.transform_result, (TransformExit, TransformSkip) + ): + skips += 1 - # Print an execution result, keep track of failures - _print_parallel_result( - result, - progress, - unified_diff=bool(unified_diff), - show_successes=show_successes, - hide_generated=hide_generated, - hide_blacklisted=hide_blacklisted, - ) - progress.print(successes + failures + skips) - - if isinstance(result.transform_result, TransformFailure): - failures += 1 - elif isinstance(result.transform_result, TransformSuccess): - successes += 1 - elif isinstance(result.transform_result, (TransformExit, TransformSkip)): - skips += 1 - - warnings += len(result.transform_result.warning_messages) - - # Join the process to free any related resources. - # Remove all references to the process to allow the GC to - # clean up any file handles. - process = filename_to_process.pop(result.filename, None) - if process: - process.join() - joinable_processes.discard(process) - - # Now, join on all of them so we don't leave zombies or hang - for p in joinable_processes: - p.join() + warnings += len(result.transform_result.warning_messages) + finally: + progress.clear() # Return whether there was one or more failure. - progress.clear() - - # If we caught an interrupt, raise that - if interrupted: - raise KeyboardInterrupt() return ParallelTransformResult( successes=successes, failures=failures, skips=skips, warnings=warnings ) diff --git a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py new file mode 100644 index 00000000..d92307ce --- /dev/null +++ b/libcst/codemod/_dummy_pool.py @@ -0,0 +1,35 @@ +from types import TracebackType +from typing import Callable, Generator, Iterable, Optional, Type, TypeVar + + +RetT = TypeVar("RetT") +ArgT = TypeVar("ArgT") + + +class DummyPool: + """ + Synchronous dummy `multiprocessing.Pool` analogue. + """ + + def __init__(self, processes: Optional[int] = None) -> None: + pass + + def imap_unordered( + self, + func: Callable[[ArgT], RetT], + iterable: Iterable[ArgT], + chunksize: Optional[int] = None, + ) -> Generator[RetT, None, None]: + for args in iterable: + yield func(args) + + def __enter__(self) -> "DummyPool": + return self + + def __exit__( + self, + exc_type: Optional[Type[Exception]], + exc: Optional[Exception], + tb: Optional[TracebackType], + ) -> None: + pass From 31bae01ccb112432ee2276c97d845ad0bed0b7e8 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Thu, 12 Nov 2020 21:14:34 -0800 Subject: [PATCH 081/632] Correct handling of walrus operator in function args (#417) Previous behavior treated it as identical to equal, making a kwarg; it should instead be a positional arg. Includes several tests to make sure that whitespace handling is correct. 
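As a rough sketch of the behavioral difference (not part of the patch; `f` and `y` are placeholder names, and the 3.8 grammar is selected explicitly, matching the forced-3.8 parser used in the tests):

```python
import libcst as cst

cfg = cst.PartialParserConfig(python_version="3.8")

walrus_call = cst.ensure_type(cst.parse_expression("f(y := 1)", config=cfg), cst.Call)
kwarg_call = cst.ensure_type(cst.parse_expression("f(y=1)", config=cfg), cst.Call)

# The walrus form now parses as a positional Arg wrapping a NamedExpr ...
assert walrus_call.args[0].keyword is None
assert isinstance(walrus_call.args[0].value, cst.NamedExpr)
# ... while the `=` form is still a keyword Arg.
assert kwarg_call.args[0].keyword is not None
```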
Fixes #416 --- libcst/_nodes/tests/test_namedexpr.py | 65 ++++++++++++++++++++++++ libcst/_parser/conversions/expression.py | 10 +++- 2 files changed, 74 insertions(+), 1 deletion(-) diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index 4ba1485b..3949bbea 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -101,6 +101,71 @@ class NamedExprTest(CSTNodeTest): "parser": _parse_statement_force_38, "expected_position": None, }, + # Function args + { + "node": cst.Call( + func=cst.Name(value="f"), + args=[ + cst.Arg( + value=cst.NamedExpr( + target=cst.Name(value="y"), + value=cst.Integer(value="1"), + whitespace_before_walrus=cst.SimpleWhitespace(""), + whitespace_after_walrus=cst.SimpleWhitespace(""), + ) + ), + ], + ), + "code": "f(y:=1)", + "parser": _parse_expression_force_38, + "expected_position": None, + }, + # Whitespace handling on args is fragile + { + "node": cst.Call( + func=cst.Name(value="f"), + args=[ + cst.Arg( + value=cst.Name(value="x"), + comma=cst.Comma( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.Arg( + value=cst.NamedExpr( + target=cst.Name(value="y"), + value=cst.Integer(value="1"), + whitespace_before_walrus=cst.SimpleWhitespace(" "), + whitespace_after_walrus=cst.SimpleWhitespace(" "), + ), + whitespace_after_arg=cst.SimpleWhitespace(" "), + ), + ], + ), + "code": "f(x, y := 1 )", + "parser": _parse_expression_force_38, + "expected_position": None, + }, + { + "node": cst.Call( + func=cst.Name(value="f"), + args=[ + cst.Arg( + value=cst.NamedExpr( + target=cst.Name(value="y"), + value=cst.Integer(value="1"), + whitespace_before_walrus=cst.SimpleWhitespace(" "), + whitespace_after_walrus=cst.SimpleWhitespace(" "), + ), + whitespace_after_arg=cst.SimpleWhitespace(" "), + ), + ], + whitespace_before_args=cst.SimpleWhitespace(" "), + ), + "code": "f( y := 1 )", + "parser": _parse_expression_force_38, + "expected_position": None, + }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 8edbf262..e66f8368 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -1441,8 +1441,16 @@ def convert_arg_assign_comp_for( elt, for_in = children return Arg(value=GeneratorExp(elt.value, for_in, lpar=(), rpar=())) else: - # "key = value" assignment argument lhs, equal, rhs = children + # "key := value" assignment; positional + if equal.string == ":=": + val = convert_namedexpr_test(config, children) + if not isinstance(val, WithLeadingWhitespace): + raise Exception( + f"convert_namedexpr_test returned {val!r}, not WithLeadingWhitespace" + ) + return Arg(value=val.value) + # "key = value" assignment; keyword argument return Arg( keyword=lhs.value, equal=AssignEqual( From 90df5a6a377bfcfa7371c2ef47fcb7c69e38a5b6 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Tue, 17 Nov 2020 09:32:43 -0800 Subject: [PATCH 082/632] Allow generator expressions in f-strings (#419) Fixes #388 --- libcst/_nodes/tests/test_atom.py | 29 ++++++++++++++++++++++++ libcst/_parser/conversions/expression.py | 4 ++-- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index 452c9f70..1a14e372 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -668,6 +668,35 @@ class AtomTest(CSTNodeTest): "parser": parse_expression, "expected_position": CodeRange((1, 1), (1, 
4)), }, + # Generator expression (doesn't make sense, but legal syntax) + { + "node": cst.FormattedString( + start='f"', + parts=[ + cst.FormattedStringExpression( + expression=cst.GeneratorExp( + elt=cst.Name( + value="x", + ), + for_in=cst.CompFor( + target=cst.Name( + value="x", + ), + iter=cst.Name( + value="y", + ), + ), + lpar=[], + rpar=[], + ), + ), + ], + end='"', + ), + "code": 'f"{x for x in y}"', + "parser": parse_expression, + "expected_position": None, + }, # Concatenated strings { "node": cst.ConcatenatedString( diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index e66f8368..b7e5c189 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -1038,12 +1038,12 @@ def convert_fstring_equality( @with_production( "fstring_expr", - "'{' testlist [ fstring_equality ] [ fstring_conversion ] [ fstring_format_spec ] '}'", + "'{' testlist_comp_tuple [ fstring_equality ] [ fstring_conversion ] [ fstring_format_spec ] '}'", version=">=3.8", ) @with_production( "fstring_expr", - "'{' testlist [ fstring_conversion ] [ fstring_format_spec ] '}'", + "'{' testlist_comp_tuple [ fstring_conversion ] [ fstring_format_spec ] '}'", version="<=3.7", ) def convert_fstring_expr( From 2ef730292bba090d248b257e6fcebb40638cb846 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 17 Nov 2020 17:40:50 +0000 Subject: [PATCH 083/632] [scope] keep track of assignment/access ordering (#413) --- libcst/metadata/scope_provider.py | 84 +++++++++++++-- libcst/metadata/tests/test_scope_provider.py | 102 +++++++++++++++++++ 2 files changed, 177 insertions(+), 9 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 39fb33c5..7886f458 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -36,6 +36,25 @@ from libcst.metadata.expression_context_provider import ( ) +_ASSIGNMENT_LIKE_NODES = ( + cst.AnnAssign, + cst.AsName, + cst.Assign, + cst.AugAssign, + cst.ClassDef, + cst.CompFor, + cst.For, + cst.FunctionDef, + cst.Global, + cst.Import, + cst.ImportFrom, + cst.NamedExpr, + cst.Nonlocal, + cst.Parameters, + cst.WithItem, +) + + @add_slots @dataclass(frozen=False) class Access: @@ -68,6 +87,7 @@ class Access: is_type_hint: bool __assignments: Set["BaseAssignment"] + __index: int def __init__( self, node: cst.Name, scope: "Scope", is_annotation: bool, is_type_hint: bool @@ -77,6 +97,7 @@ class Access: self.is_annotation = is_annotation self.is_type_hint = is_type_hint self.__assignments = set() + self.__index = scope._assignment_count def __hash__(self) -> int: return id(self) @@ -86,11 +107,25 @@ class Access: """Return all assignments of the access.""" return self.__assignments - def record_assignment(self, assignment: "BaseAssignment") -> None: - self.__assignments.add(assignment) + @property + def _index(self) -> int: + return self.__index - def record_assignments(self, assignments: Set["BaseAssignment"]) -> None: - self.__assignments |= assignments + def record_assignment(self, assignment: "BaseAssignment") -> None: + if assignment.scope != self.scope or assignment._index < self.__index: + self.__assignments.add(assignment) + + def record_assignments(self, name: str) -> None: + assignments = self.scope[name] + # filter out assignments that happened later than this access + previous_assignments = { + assignment + for assignment in assignments + if assignment.scope != self.scope or assignment._index < self.__index + } + if not 
previous_assignments and assignments: + previous_assignments = self.scope.parent[name] + self.__assignments |= previous_assignments class BaseAssignment(abc.ABC): @@ -109,10 +144,22 @@ class BaseAssignment(abc.ABC): self.__accesses = set() def record_access(self, access: Access) -> None: - self.__accesses.add(access) + if access.scope != self.scope or self._index < access._index: + self.__accesses.add(access) def record_accesses(self, accesses: Set[Access]) -> None: - self.__accesses |= accesses + later_accesses = { + access + for access in accesses + if access.scope != self.scope or self._index < access._index + } + self.__accesses |= later_accesses + earlier_accesses = accesses - later_accesses + if earlier_accesses and self.scope.parent != self.scope: + # Accesses "earlier" than the relevant assignment should be attached + # to assignments of the same name in the parent + for shadowed_assignment in self.scope.parent[self.name]: + shadowed_assignment.record_accesses(earlier_accesses) @property def references(self) -> Collection[Access]: @@ -123,6 +170,11 @@ class BaseAssignment(abc.ABC): def __hash__(self) -> int: return id(self) + @property + def _index(self) -> int: + """Return an integer that represents the order of assignments in `scope`""" + return -1 + class Assignment(BaseAssignment): """An assignment records the name, CSTNode and its accesses.""" @@ -130,11 +182,19 @@ class Assignment(BaseAssignment): #: The node of assignment, it could be a :class:`~libcst.Import`, :class:`~libcst.ImportFrom`, #: :class:`~libcst.Name`, :class:`~libcst.FunctionDef`, or :class:`~libcst.ClassDef`. node: cst.CSTNode + __index: int - def __init__(self, name: str, scope: "Scope", node: cst.CSTNode) -> None: + def __init__( + self, name: str, scope: "Scope", node: cst.CSTNode, index: int + ) -> None: self.node = node + self.__index = index super().__init__(name, scope) + @property + def _index(self) -> int: + return self.__index + # even though we don't override the constructor. 
class BuiltinAssignment(BaseAssignment): @@ -318,6 +378,7 @@ class Scope(abc.ABC): globals: "GlobalScope" _assignments: MutableMapping[str, Set[BaseAssignment]] _accesses: MutableMapping[str, Set[Access]] + _assignment_count: int def __init__(self, parent: "Scope") -> None: super().__init__() @@ -325,9 +386,12 @@ class Scope(abc.ABC): self.globals = parent.globals self._assignments = defaultdict(set) self._accesses = defaultdict(set) + self._assignment_count = 0 def record_assignment(self, name: str, node: cst.CSTNode) -> None: - self._assignments[name].add(Assignment(name=name, scope=self, node=node)) + self._assignments[name].add( + Assignment(name=name, scope=self, node=node, index=self._assignment_count) + ) def record_access(self, name: str, access: Access) -> None: self._accesses[name].add(access) @@ -934,7 +998,7 @@ class ScopeVisitor(cst.CSTVisitor): break scope_name_accesses[(access.scope, name)].add(access) - access.record_assignments(access.scope[name]) + access.record_assignments(name) access.scope.record_access(name, access) for (scope, name), accesses in scope_name_accesses.items(): @@ -945,6 +1009,8 @@ class ScopeVisitor(cst.CSTVisitor): def on_leave(self, original_node: cst.CSTNode) -> None: self.provider.set_metadata(original_node, self.scope) + if isinstance(original_node, _ASSIGNMENT_LIKE_NODES): + self.scope._assignment_count += 1 super().on_leave(original_node) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index f04efa05..e54bbff9 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1329,3 +1329,105 @@ class ScopeProviderTest(UnitTest): ) } self.assertEqual(names, {"a.b.c", "a.b", "a"}) + + def test_ordering(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + from a import b + class X: + x = b + b = b + y = b + """ + ) + global_scope = scopes[m] + import_stmt = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.ImportFrom + ) + first_assignment = list(global_scope.assignments)[0] + assert isinstance(first_assignment, cst.metadata.Assignment) + self.assertEqual(first_assignment.node, import_stmt) + global_refs = list(first_assignment.references) + self.assertEqual(len(global_refs), 2) + class_def = ensure_type(m.body[1], cst.ClassDef) + x = ensure_type( + ensure_type(class_def.body.body[0], cst.SimpleStatementLine).body[0], + cst.Assign, + ) + self.assertEqual(x.value, global_refs[0].node) + class_b = ensure_type( + ensure_type(class_def.body.body[1], cst.SimpleStatementLine).body[0], + cst.Assign, + ) + self.assertEqual(class_b.value, global_refs[1].node) + + class_accesses = list(scopes[x].accesses) + self.assertEqual(len(class_accesses), 3) + self.assertIn( + class_b.targets[0].target, + [ + ref.node + for acc in class_accesses + for ref in acc.referents + if isinstance(ref, Assignment) + ], + ) + y = ensure_type( + ensure_type(class_def.body.body[2], cst.SimpleStatementLine).body[0], + cst.Assign, + ) + self.assertIn(y.value, [access.node for access in class_accesses]) + + def test_ordering_between_scopes(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + def f(a): + print(a) + print(b) + a = 1 + b = 1 + """ + ) + f = cst.ensure_type(m.body[0], cst.FunctionDef) + a_param = f.params.params[0].name + a_param_assignment = list(scopes[a_param]["a"])[0] + a_param_refs = list(a_param_assignment.references) + first_print = cst.ensure_type( + cst.ensure_type( + cst.ensure_type(f.body.body[0], 
cst.SimpleStatementLine).body[0], + cst.Expr, + ).value, + cst.Call, + ) + second_print = cst.ensure_type( + cst.ensure_type( + cst.ensure_type(f.body.body[1], cst.SimpleStatementLine).body[0], + cst.Expr, + ).value, + cst.Call, + ) + self.assertEqual( + first_print.args[0].value, + a_param_refs[0].node, + ) + a_global = ( + cst.ensure_type( + cst.ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Assign + ) + .targets[0] + .target + ) + a_global_assignment = list(scopes[a_global]["a"])[0] + a_global_refs = list(a_global_assignment.references) + self.assertEqual(a_global_refs, []) + b_global = ( + cst.ensure_type( + cst.ensure_type(m.body[2], cst.SimpleStatementLine).body[0], cst.Assign + ) + .targets[0] + .target + ) + b_global_assignment = list(scopes[b_global]["b"])[0] + b_global_refs = list(b_global_assignment.references) + self.assertEqual(len(b_global_refs), 1) + self.assertEqual(b_global_refs[0].node, second_print.args[0].value) From 110095148fdb1fcb1d66e1d3e35da1cb4efc2903 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Tue, 17 Nov 2020 10:55:44 -0800 Subject: [PATCH 084/632] Handle string type references in cast() (#418) * Handle string type references in cast() * Directly visit the first argument of cast() Co-authored-by: Zsolt Dollenstein Co-authored-by: Zsolt Dollenstein --- libcst/metadata/scope_provider.py | 18 ++++++----- libcst/metadata/tests/test_scope_provider.py | 33 +++++++++++++++++--- 2 files changed, 39 insertions(+), 12 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 7886f458..043f87e8 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -773,13 +773,19 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Call(self, node: cst.Call) -> Optional[bool]: self.__top_level_attribute_stack.append(None) - qnames = self.scope.get_qualified_names_for(node) - if any(qn.name in {"typing.NewType", "typing.TypeVar"} for qn in qnames): + qnames = {qn.name for qn in self.scope.get_qualified_names_for(node)} + if "typing.NewType" in qnames or "typing.TypeVar" in qnames: node.func.visit(self) self.__in_type_hint.add(node) for arg in node.args[1:]: arg.visit(self) return False + if "typing.cast" in qnames: + node.func.visit(self) + self.__in_type_hint.add(node) + if len(node.args) > 0: + node.args[0].visit(self) + return False return True def leave_Call(self, original_node: cst.Call) -> None: @@ -814,12 +820,10 @@ class ScopeVisitor(cst.CSTVisitor): return False def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: - qnames = self.scope.get_qualified_names_for(node.value) - if any(qn.name.startswith(("typing.", "typing_extensions.")) for qn in qnames): + qnames = {qn.name for qn in self.scope.get_qualified_names_for(node.value)} + if any(qn.startswith(("typing.", "typing_extensions.")) for qn in qnames): self.__in_type_hint.add(node) - if any( - qn.name in {"typing.Literal", "typing_extensions.Literal"} for qn in qnames - ): + if "typing.Literal" in qnames or "typing_extensions.Literal" in qnames: self.__in_ignored_subscript.add(node) return True diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index e54bbff9..36fd19e5 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1037,23 +1037,24 @@ class ScopeProviderTest(UnitTest): def test_annotation_access(self) -> None: m, scopes = get_scope_metadata_provider( """ - from typing 
import Literal, NewType, Optional, TypeVar, Callable - from a import A, B, C, D, E, F, G, H, I, J + from typing import Literal, NewType, Optional, TypeVar, Callable, cast + from a import A, B, C, D, D2, E, E2, F, G, G2, H, I, J, K, K2 def x(a: A): pass def y(b: "B"): pass def z(c: Literal["C"]): pass - DType = TypeVar("DType", bound=D) - EType = TypeVar("EType", bound="E") + DType = TypeVar("D2", bound=D) + EType = TypeVar("E2", bound="E") FType = TypeVar("F") - GType = NewType("GType", "Optional[G]") + GType = NewType("G2", "Optional[G]") HType = Optional["H"] IType = Callable[..., I] class Test(Generic[J]): pass + casted = cast("K", "K2") """ ) imp = ensure_type( @@ -1084,6 +1085,10 @@ class ScopeProviderTest(UnitTest): self.assertFalse(references[0].is_annotation) self.assertTrue(references[0].is_type_hint) + assignment = list(scope["D2"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 0) + assignment = list(scope["E"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) @@ -1091,6 +1096,10 @@ class ScopeProviderTest(UnitTest): self.assertFalse(references[0].is_annotation) self.assertTrue(references[0].is_type_hint) + assignment = list(scope["E2"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 0) + assignment = list(scope["F"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 0) @@ -1102,6 +1111,10 @@ class ScopeProviderTest(UnitTest): self.assertFalse(references[0].is_annotation) self.assertTrue(references[0].is_type_hint) + assignment = list(scope["G2"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 0) + assignment = list(scope["H"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) @@ -1121,6 +1134,16 @@ class ScopeProviderTest(UnitTest): references = list(assignment.references) self.assertFalse(references[0].is_annotation) + assignment = list(scope["K"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertFalse(references[0].is_annotation) + + assignment = list(scope["K2"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 0) + def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( """ From 77d8a903ee3f1e11ee35c728c8328832a453a9dc Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 18 Nov 2020 09:55:35 +0000 Subject: [PATCH 085/632] bump version to 0.3.14 (#420) * bump version to 0.3.14 * add whitespace --- CHANGELOG.md | 10 ++++++++++ libcst/_version.py | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e15903ba..dd0d1673 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ +# 0.3.14 - 2020-11-18 + +## Fixed +- Fix is_annotation for types used in classdef base and assign value [#406](https://github.com/Instagram/LibCST/pull/406) +- Visit concatenated f-strings during scope analysis [#411](https://github.com/Instagram/LibCST/pull/411) +- Correct handling of walrus operator in function args [#417](https://github.com/Instagram/LibCST/pull/417) +- Allow generator expressions in f-strings [#419](https://github.com/Instagram/LibCST/pull/419) +- Keep track of assignment/access ordering during scope analysis [#413](https://github.com/Instagram/LibCST/pull/413) 
+- Handle string type references in cast() during scope analysis [#418](https://github.com/Instagram/LibCST/pull/418) + # 0.3.13 - 2020-10-12 ## Fixed diff --git a/libcst/_version.py b/libcst/_version.py index e00f3a61..fa483e91 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.13" +LIBCST_VERSION: str = "0.3.14" From 16b30fe4c3397133d660c1b34e0ca346bfb5d48e Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 18 Nov 2020 23:34:26 +0000 Subject: [PATCH 086/632] add missing license header (#421) --- libcst/codemod/_dummy_pool.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py index d92307ce..922037dd 100644 --- a/libcst/codemod/_dummy_pool.py +++ b/libcst/codemod/_dummy_pool.py @@ -1,3 +1,8 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + from types import TracebackType from typing import Callable, Generator, Iterable, Optional, Type, TypeVar From 02fc4401bc3313f0db109d86d1b84c6b35abba5a Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sun, 29 Nov 2020 20:42:13 -0800 Subject: [PATCH 087/632] Support Named Unicode Characters and yield in f-strings (#424) * Support named unicode characters in f-strings This is the same as my pull request https://github.com/davidhalter/parso/pull/160 * A small bugfix to what is allowed in f-string expressions Thanks to https://github.com/davidhalter/parso/pull/159 for catching that yield (as an expression, I suppose) is allowed on 3.6. --- libcst/_nodes/tests/test_atom.py | 28 ++++++++++++++++++++++++ libcst/_parser/conversions/expression.py | 6 ++--- libcst/_parser/parso/python/tokenize.py | 11 ++++++++-- 3 files changed, 40 insertions(+), 5 deletions(-) diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index 1a14e372..64561146 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -655,6 +655,34 @@ class AtomTest(CSTNodeTest): "parser": _parse_expression_force_38, "expected_position": None, }, + { + "node": cst.FormattedString( + parts=( + cst.FormattedStringExpression( + cst.Yield( + value=cst.Integer("1"), + whitespace_after_yield=cst.SimpleWhitespace(" "), + ), + ), + ), + ), + "code": 'f"{yield 1}"', + "parser": _parse_expression_force_38, + "expected_position": None, + }, + { + "node": cst.FormattedString( + parts=( + cst.FormattedStringText("\\N{X Y}"), + cst.FormattedStringExpression( + cst.Name(value="Z"), + ), + ), + ), + "code": 'f"\\N{X Y}{Z}"', + "parser": parse_expression, + "expected_position": None, + }, # Validate parens { "node": cst.FormattedString( diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index b7e5c189..aa84a4e1 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -1038,13 +1038,13 @@ def convert_fstring_equality( @with_production( "fstring_expr", - "'{' testlist_comp_tuple [ fstring_equality ] [ fstring_conversion ] [ fstring_format_spec ] '}'", + "'{' (testlist_comp_tuple | yield_expr) [ fstring_equality ] [ fstring_conversion ] [ fstring_format_spec ] '}'", version=">=3.8", ) @with_production( "fstring_expr", - "'{' testlist_comp_tuple [ fstring_conversion ] [ fstring_format_spec ] '}'", - version="<=3.7", + "'{' (testlist_comp_tuple | yield_expr) [ 
fstring_conversion ] [ fstring_format_spec ] '}'", + version="<3.8", ) def convert_fstring_expr( config: ParserConfig, children: typing.Sequence[typing.Any] diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 0f60472e..478bc03a 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -146,8 +146,15 @@ def _get_token_collection(version_info: PythonVersionInfo) -> TokenCollection: return result -fstring_string_single_line = _compile(r"(?:\{\{|\}\}|\\(?:\r\n?|\n)|[^{}\r\n])+") -fstring_string_multi_line = _compile(r"(?:[^{}]+|\{\{|\}\})+") +unicode_character_name = r"[A-Za-z0-9\-]+(?: [A-Za-z0-9\-]+)*" +fstring_string_single_line = _compile( + r"(?:\{\{|\}\}|\\N\{" + + unicode_character_name + + r"\}|\\(?:\r\n?|\n)|\\[^\r\nN]|[^{}\r\n\\])+" +) +fstring_string_multi_line = _compile( + r"(?:\{\{|\}\}|\\N\{" + unicode_character_name + r"\}|\\[^N]|[^{}\\])+" +) fstring_format_spec_single_line = _compile(r"(?:\\(?:\r\n?|\n)|[^{}\r\n])+") fstring_format_spec_multi_line = _compile(r"[^{}]+") From 2485d5a9670740ee544d9afe4fe3cb746909c532 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Sun, 29 Nov 2020 20:50:23 -0800 Subject: [PATCH 088/632] [Scope] Fix referencing of remaining objects in cast() (#422) --- libcst/metadata/scope_provider.py | 5 ++++- libcst/metadata/tests/test_scope_provider.py | 15 +++++++++++++-- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 043f87e8..c5431796 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -782,9 +782,12 @@ class ScopeVisitor(cst.CSTVisitor): return False if "typing.cast" in qnames: node.func.visit(self) - self.__in_type_hint.add(node) if len(node.args) > 0: + self.__in_type_hint.add(node) node.args[0].visit(self) + self.__in_type_hint.discard(node) + for arg in node.args[1:]: + arg.visit(self) return False return True diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 36fd19e5..11351ff1 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1038,7 +1038,7 @@ class ScopeProviderTest(UnitTest): m, scopes = get_scope_metadata_provider( """ from typing import Literal, NewType, Optional, TypeVar, Callable, cast - from a import A, B, C, D, D2, E, E2, F, G, G2, H, I, J, K, K2 + from a import A, B, C, D, D2, E, E2, F, G, G2, H, I, J, K, K2, L, M def x(a: A): pass def y(b: "B"): @@ -1054,7 +1054,8 @@ class ScopeProviderTest(UnitTest): class Test(Generic[J]): pass - casted = cast("K", "K2") + castedK = cast("K", "K2") + castedL = cast("L", M) """ ) imp = ensure_type( @@ -1144,6 +1145,16 @@ class ScopeProviderTest(UnitTest): self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 0) + assignment = list(scope["L"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + + assignment = list(scope["M"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( """ From 1326a0ee642db32c547540c971e1b6f79dcfbdc1 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 1 Dec 2020 13:01:29 +0000 Subject: [PATCH 
089/632] fix assignment/access ordering in comprehensions (#423) --- libcst/metadata/scope_provider.py | 4 + libcst/metadata/tests/test_scope_provider.py | 102 +++++++++++++++++++ 2 files changed, 106 insertions(+) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index c5431796..00ae536b 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -36,6 +36,8 @@ from libcst.metadata.expression_context_provider import ( ) +# Comprehensions are handled separately in _visit_comp_alike due to +# the complexity of the semantics _ASSIGNMENT_LIKE_NODES = ( cst.AnnAssign, cst.AsName, @@ -976,6 +978,8 @@ class ScopeVisitor(cst.CSTVisitor): self.provider.set_metadata(for_in, self.scope) with self._new_scope(ComprehensionScope, node): for_in.target.visit(self) + # Things from here on can refer to the target. + self.scope._assignment_count += 1 for condition in for_in.ifs: condition.visit(self) inner_for_in = for_in.inner_for_in diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 11351ff1..0903ebf8 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1465,3 +1465,105 @@ class ScopeProviderTest(UnitTest): b_global_refs = list(b_global_assignment.references) self.assertEqual(len(b_global_refs), 1) self.assertEqual(b_global_refs[0].node, second_print.args[0].value) + + def test_ordering_comprehension(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + def f(a): + [a for a in [] for b in a] + [b for a in [] for b in a] + [a for a in [] for a in []] + a = 1 + """ + ) + f = cst.ensure_type(m.body[0], cst.FunctionDef) + a_param = f.params.params[0].name + a_param_assignment = list(scopes[a_param]["a"])[0] + a_param_refs = list(a_param_assignment.references) + self.assertEqual(a_param_refs, []) + first_comp = cst.ensure_type( + cst.ensure_type( + cst.ensure_type(f.body.body[0], cst.SimpleStatementLine).body[0], + cst.Expr, + ).value, + cst.ListComp, + ) + a_comp_assignment = list(scopes[first_comp.elt]["a"])[0] + self.assertEqual(len(a_comp_assignment.references), 2) + self.assertIn( + first_comp.elt, [ref.node for ref in a_comp_assignment.references] + ) + + second_comp = cst.ensure_type( + cst.ensure_type( + cst.ensure_type(f.body.body[1], cst.SimpleStatementLine).body[0], + cst.Expr, + ).value, + cst.ListComp, + ) + b_comp_assignment = list(scopes[second_comp.elt]["b"])[0] + self.assertEqual(len(b_comp_assignment.references), 1) + a_second_comp_assignment = list(scopes[second_comp.elt]["a"])[0] + self.assertEqual(len(a_second_comp_assignment.references), 1) + + third_comp = cst.ensure_type( + cst.ensure_type( + cst.ensure_type(f.body.body[2], cst.SimpleStatementLine).body[0], + cst.Expr, + ).value, + cst.ListComp, + ) + a_third_comp_assignments = list(scopes[third_comp.elt]["a"]) + self.assertEqual(len(a_third_comp_assignments), 2) + a_third_comp_access = list(scopes[third_comp.elt].accesses)[0] + self.assertEqual(a_third_comp_access.node, third_comp.elt) + # We record both assignments because it's impossible to know which one + # the access refers to without running the program + self.assertEqual(len(a_third_comp_access.referents), 2) + inner_for_in = third_comp.for_in.inner_for_in + self.assertIsNotNone(inner_for_in) + if inner_for_in: + self.assertIn( + inner_for_in.target, + { + ref.node + for ref in a_third_comp_access.referents + if isinstance(ref, Assignment) + }, + ) + + a_global = ( + cst.ensure_type( + 
cst.ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Assign + ) + .targets[0] + .target + ) + a_global_assignment = list(scopes[a_global]["a"])[0] + a_global_refs = list(a_global_assignment.references) + self.assertEqual(a_global_refs, []) + + def test_ordering_comprehension_confusing(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + def f(a): + [a for a in a] + a = 1 + """ + ) + f = cst.ensure_type(m.body[0], cst.FunctionDef) + a_param = f.params.params[0].name + a_param_assignment = list(scopes[a_param]["a"])[0] + a_param_refs = list(a_param_assignment.references) + self.assertEqual(len(a_param_refs), 1) + comp = cst.ensure_type( + cst.ensure_type( + cst.ensure_type(f.body.body[0], cst.SimpleStatementLine).body[0], + cst.Expr, + ).value, + cst.ListComp, + ) + a_comp_assignment = list(scopes[comp.elt]["a"])[0] + self.assertEqual(list(a_param_refs)[0].node, comp.for_in.iter) + self.assertEqual(len(a_comp_assignment.references), 1) + self.assertEqual(list(a_comp_assignment.references)[0].node, comp.elt) From 8eee3cc9222405bd4cbf233ff71392218f18d2da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Tue, 1 Dec 2020 11:21:15 -0800 Subject: [PATCH 090/632] Bump to version 0.3.15 (#425) --- CHANGELOG.md | 9 +++++++++ libcst/_version.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dd0d1673..9ac107a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,12 @@ +# 0.3.15 - 2020-12-01 + +## Added +- Support Named Unicode Characters and yield in f-strings [#424](https://github.com/Instagram/LibCST/pull/424) + +## Fixed +- Assignment/access ordering in comprehensions [#423](https://github.com/Instagram/LibCST/pull/423) +- Referencing of remaining objects in cast() [#422](https://github.com/Instagram/LibCST/pull/422) + # 0.3.14 - 2020-11-18 ## Fixed diff --git a/libcst/_version.py b/libcst/_version.py index fa483e91..08752152 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. 
-LIBCST_VERSION: str = "0.3.14" +LIBCST_VERSION: str = "0.3.15" From 2f117f0bc3f718b9cc203dfbd6c9f0530ec043c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Mon, 7 Dec 2020 17:48:09 -0800 Subject: [PATCH 091/632] Fix for not parsing subscripts such as cast()["from"] (#428) --- libcst/metadata/scope_provider.py | 11 ++-- libcst/metadata/tests/test_scope_provider.py | 57 ++++++++++++++++++++ 2 files changed, 63 insertions(+), 5 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 00ae536b..580cc112 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -825,11 +825,12 @@ class ScopeVisitor(cst.CSTVisitor): return False def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: - qnames = {qn.name for qn in self.scope.get_qualified_names_for(node.value)} - if any(qn.startswith(("typing.", "typing_extensions.")) for qn in qnames): - self.__in_type_hint.add(node) - if "typing.Literal" in qnames or "typing_extensions.Literal" in qnames: - self.__in_ignored_subscript.add(node) + if isinstance(node.value, cst.Name): + qnames = {qn.name for qn in self.scope.get_qualified_names_for(node.value)} + if any(qn.startswith(("typing.", "typing_extensions.")) for qn in qnames): + self.__in_type_hint.add(node) + if "typing.Literal" in qnames or "typing_extensions.Literal" in qnames: + self.__in_ignored_subscript.add(node) return True def leave_Subscript(self, original_node: cst.Subscript) -> None: diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 0903ebf8..c8a2b74c 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1567,3 +1567,60 @@ class ScopeProviderTest(UnitTest): self.assertEqual(list(a_param_refs)[0].node, comp.for_in.iter) self.assertEqual(len(a_comp_assignment.references), 1) self.assertEqual(list(a_comp_assignment.references)[0].node, comp.elt) + + def test_cast(self) -> None: + with self.assertRaises(cst.ParserSyntaxError): + m, scopes = get_scope_metadata_provider( + """ + from typing import TypeVar + TypeVar("Name", "3rr0r") + """ + ) + + try: + m, scopes = get_scope_metadata_provider( + """ + from typing import TypeVar + TypeVar("3rr0r", "int") + """ + ) + except cst.ParserSyntaxError: + self.fail( + "First string argument of NewType and TypeVar should not be parsed" + ) + + with self.assertRaises(cst.ParserSyntaxError): + m, scopes = get_scope_metadata_provider( + """ + from typing import Dict + Dict["str", "3rr0r"] + """ + ) + + try: + m, scopes = get_scope_metadata_provider( + """ + from typing import Dict, cast + cast(Dict[str, str], {})["3rr0r"] + """ + ) + except cst.ParserSyntaxError: + self.fail("Subscript of function calls should not be parsed") + + try: + m, scopes = get_scope_metadata_provider( + """ + from typing import cast + cast(str, "3rr0r") + """ + ) + except cst.ParserSyntaxError: + self.fail("String arguments of cast should not be parsed") + + with self.assertRaises(cst.ParserSyntaxError): + m, scopes = get_scope_metadata_provider( + """ + from typing import cast + cast("3rr0r", "") + """ + ) From 5940211af5b409c47f4f0962f711f0c46791ca47 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 15 Dec 2020 08:58:56 +0000 Subject: [PATCH 092/632] Handle scope ordering in For statements (#430) * Handle scope ordering in For statements * no need to increment after For anymore --- libcst/metadata/scope_provider.py | 9 +++++++- 
libcst/metadata/tests/test_scope_provider.py | 23 ++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 580cc112..f4d35836 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -45,7 +45,6 @@ _ASSIGNMENT_LIKE_NODES = ( cst.AugAssign, cst.ClassDef, cst.CompFor, - cst.For, cst.FunctionDef, cst.Global, cst.Import, @@ -993,6 +992,14 @@ class ScopeVisitor(cst.CSTVisitor): node.elt.visit(self) return False + def visit_For(self, node: cst.For) -> Optional[bool]: + node.target.visit(self) + self.scope._assignment_count += 1 + for child in [node.iter, node.body, node.orelse, node.asynchronous]: + if child is not None: + child.visit(self) + return False + def infer_accesses(self) -> None: # Aggregate access with the same name and batch add with set union as an optimization. # In worst case, all accesses (m) and assignments (n) refer to the same name, diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index c8a2b74c..a84d0532 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1568,6 +1568,29 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(a_comp_assignment.references), 1) self.assertEqual(list(a_comp_assignment.references)[0].node, comp.elt) + def test_for_scope_ordering(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + def f(): + for x in []: + x + class X: + def f(): + for x in []: + x + """ + ) + for scope in scopes.values(): + for acc in scope.accesses: + self.assertEqual( + len(acc.referents), + 1, + msg=( + "Access for node has incorrect number of referents: " + + f"{acc.node}" + ), + ) + def test_cast(self) -> None: with self.assertRaises(cst.ParserSyntaxError): m, scopes = get_scope_metadata_provider( From 660e53361084a712a8572f6f5fe521bb4a8f7148 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 15 Dec 2020 12:46:15 +0000 Subject: [PATCH 093/632] Don't allow out of order accesses in the global scope (#431) --- libcst/metadata/scope_provider.py | 2 +- libcst/metadata/tests/test_scope_provider.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index f4d35836..b5c6ba68 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -124,7 +124,7 @@ class Access: for assignment in assignments if assignment.scope != self.scope or assignment._index < self.__index } - if not previous_assignments and assignments: + if not previous_assignments and assignments and self.scope.parent != self.scope: previous_assignments = self.scope.parent[name] self.__assignments |= previous_assignments diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index a84d0532..d1566aac 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1591,6 +1591,24 @@ class ScopeProviderTest(UnitTest): ), ) + def test_no_out_of_order_references_in_global_scope(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + x = y + y = 1 + """ + ) + for scope in scopes.values(): + for acc in scope.accesses: + self.assertEqual( + len(acc.referents), + 0, + msg=( + "Access for node has incorrect number of referents: " + + f"{acc.node}" + ), + ) + def test_cast(self) -> None: with 
self.assertRaises(cst.ParserSyntaxError): m, scopes = get_scope_metadata_provider( From 88dd0c39f0edc8309027e261d5ccfb09c1309cb4 Mon Sep 17 00:00:00 2001 From: Ben Green Date: Tue, 15 Dec 2020 08:26:35 -0500 Subject: [PATCH 094/632] Support PEP-604 style unions in decorator annotations (#429) These unions were introduced in Python 3.10 and do not define __origin__, so some extra checks are necessary to identify then. Since there is not yet a 3.10 build, a somewhat hacky test was added to simulate one of these new Unions. Resolves #414. --- libcst/matchers/_visitors.py | 12 +++++++++++- libcst/matchers/tests/test_decorators.py | 23 +++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 301e675a..be50edfd 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -79,8 +79,18 @@ def _get_possible_match_classes(matcher: BaseMatcherNode) -> List[Type[cst.CSTNo return [getattr(cst, matcher.__class__.__name__)] -def _get_possible_annotated_classes(annotation: object) -> List[Type[object]]: +def _annotation_looks_like_union(annotation: object) -> bool: if getattr(annotation, "__origin__", None) is Union: + return True + # support PEP-604 style unions introduced in Python 3.10 + return ( + annotation.__class__.__name__ == "Union" + and annotation.__class__.__module__ == "types" + ) + + +def _get_possible_annotated_classes(annotation: object) -> List[Type[object]]: + if _annotation_looks_like_union(annotation): return getattr(annotation, "__args__", []) else: return [cast(Type[object], annotation)] diff --git a/libcst/matchers/tests/test_decorators.py b/libcst/matchers/tests/test_decorators.py index c102f2ab..b1ff3d05 100644 --- a/libcst/matchers/tests/test_decorators.py +++ b/libcst/matchers/tests/test_decorators.py @@ -6,6 +6,7 @@ from ast import literal_eval from textwrap import dedent from typing import List, Set +from unittest.mock import Mock import libcst as cst import libcst.matchers as m @@ -993,3 +994,25 @@ class MatchersVisitLeaveDecoratorsTest(UnitTest): # We should have only visited a select number of nodes. 
self.assertEqual(visitor.visits, ['"baz"']) + + +# This is meant to simulate `cst.ImportFrom | cst.RemovalSentinel` in py3.10 +FakeUnionClass: Mock = Mock() +setattr(FakeUnionClass, "__name__", "Union") +setattr(FakeUnionClass, "__module__", "types") +FakeUnion: Mock = Mock() +FakeUnion.__class__ = FakeUnionClass +FakeUnion.__args__ = [cst.ImportFrom, cst.RemovalSentinel] + + +class MatchersUnionDecoratorsTest(UnitTest): + def test_init_with_new_union_annotation(self) -> None: + class TransformerWithUnionReturnAnnotation(m.MatcherDecoratableTransformer): + @m.leave(m.ImportFrom(module=m.Name(value="typing"))) + def test( + self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom + ) -> FakeUnion: + pass + + # assert that init (specifically _check_types on return annotation) passes + TransformerWithUnionReturnAnnotation() From 753a4f575ef238ce957bf192628f64d8d015ca58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Tue, 15 Dec 2020 18:17:44 -0800 Subject: [PATCH 095/632] Gathering exports in augmented assignment statements (#426) --- libcst/codemod/visitors/_gather_exports.py | 16 ++++++++++++++++ .../visitors/tests/test_gather_exports.py | 12 ++++++++++++ 2 files changed, 28 insertions(+) diff --git a/libcst/codemod/visitors/_gather_exports.py b/libcst/codemod/visitors/_gather_exports.py index 967f01e9..5186415c 100644 --- a/libcst/codemod/visitors/_gather_exports.py +++ b/libcst/codemod/visitors/_gather_exports.py @@ -6,6 +6,7 @@ from typing import Set, Union import libcst as cst +import libcst.matchers as m from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor from libcst.helpers import get_full_name_for_node @@ -53,6 +54,21 @@ class GatherExportsVisitor(ContextAwareVisitor): return True return False + def visit_AugAssign(self, node: cst.AugAssign) -> bool: + if m.matches( + node, + m.AugAssign( + target=m.Name("__all__"), + operator=m.AddAssign(), + value=m.List() | m.Tuple(), + ), + ): + value = node.value + if isinstance(value, (cst.List, cst.Tuple)): + self._is_assigned_export.add(value) + return True + return False + def visit_Assign(self, node: cst.Assign) -> bool: for target_node in node.targets: if self._handle_assign_target(target_node.target, node.value): diff --git a/libcst/codemod/visitors/tests/test_gather_exports.py b/libcst/codemod/visitors/tests/test_gather_exports.py index 916eb5a2..56d885e7 100644 --- a/libcst/codemod/visitors/tests/test_gather_exports.py +++ b/libcst/codemod/visitors/tests/test_gather_exports.py @@ -47,6 +47,18 @@ class TestGatherExportsVisitor(UnitTest): gatherer = self.gather_exports(code) self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"}) + def test_gather_exports_simple2(self) -> None: + code = """ + from foo import bar + from biz import baz + + __all__ = ["bar"] + __all__ += ["baz"] + """ + + gatherer = self.gather_exports(code) + self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"}) + def test_gather_exports_simple_set(self) -> None: code = """ from foo import bar From 1571cddc0cb3f57039619623d5c57e9e5a43ca96 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 16 Dec 2020 19:58:25 +0000 Subject: [PATCH 096/632] Walrus operator's left hand side now has STORE expression context (#433) --- .../metadata/expression_context_provider.py | 7 ++++++ .../tests/test_expression_context_provider.py | 20 +++++++++++++++++ libcst/metadata/tests/test_scope_provider.py | 22 +++++++++++++++++++ 3 files changed, 49 insertions(+) 
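[Editor's note - illustrative example, not part of the original patch. A minimal sketch of the behaviour PATCH 092 introduces, assuming a libcst release that includes this change: resolving ExpressionContextProvider over a walrus expression should now report STORE for the assignment target and LOAD for the value, mirroring the test added below.]

```python
import libcst as cst
from libcst.metadata import (
    ExpressionContext,
    ExpressionContextProvider,
    MetadataWrapper,
)

# Parse with an explicit 3.8 config so the walrus operator is accepted,
# as the accompanying test does.
wrapper = MetadataWrapper(
    cst.parse_module(
        "if x := y:\n    pass\n",
        config=cst.PartialParserConfig(python_version="3.8"),
    )
)
contexts = wrapper.resolve(ExpressionContextProvider)

# Locate the `x := y` node inside the wrapped module
# (MetadataWrapper works on its own copy of the tree).
if_stmt = cst.ensure_type(wrapper.module.body[0], cst.If)
walrus = cst.ensure_type(if_stmt.test, cst.NamedExpr)

assert contexts[walrus.target] is ExpressionContext.STORE  # `x` is written
assert contexts[walrus.value] is ExpressionContext.LOAD    # `y` is read
```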
diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index b06ba113..d9dffa12 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -84,6 +84,13 @@ class ExpressionContextVisitor(cst.CSTVisitor): node.value.visit(self) return False + def visit_NamedExpr(self, node: cst.NamedExpr) -> bool: + node.target.visit( + ExpressionContextVisitor(self.provider, ExpressionContext.STORE) + ) + node.value.visit(self) + return False + def visit_Name(self, node: cst.Name) -> bool: self.provider.set_metadata(node, self.context) return False diff --git a/libcst/metadata/tests/test_expression_context_provider.py b/libcst/metadata/tests/test_expression_context_provider.py index 25cc1d0d..91008df6 100644 --- a/libcst/metadata/tests/test_expression_context_provider.py +++ b/libcst/metadata/tests/test_expression_context_provider.py @@ -411,3 +411,23 @@ class ExpressionContextProviderTest(UnitTest): }, ) ) + + def test_walrus(self) -> None: + code = """ + if x := y: + pass + """ + wrapper = MetadataWrapper( + parse_module( + dedent(code), config=cst.PartialParserConfig(python_version="3.8") + ) + ) + wrapper.visit( + DependentVisitor( + test=self, + name_to_context={ + "x": ExpressionContext.STORE, + "y": ExpressionContext.LOAD, + }, + ) + ) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index d1566aac..27a8f495 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. +import sys from textwrap import dedent from typing import Mapping, Tuple, cast @@ -1609,6 +1610,27 @@ class ScopeProviderTest(UnitTest): ), ) + def test_walrus_accesses(self) -> None: + if sys.version_info < (3, 8): + self.skipTest("This python version doesn't support :=") + m, scopes = get_scope_metadata_provider( + """ + if x := y: + y = 1 + x + """ + ) + for scope in scopes.values(): + for acc in scope.accesses: + self.assertEqual( + len(acc.referents), + 1 if getattr(acc.node, "value") == "x" else 0, + msg=( + "Access for node has incorrect number of referents: " + + f"{acc.node}" + ), + ) + def test_cast(self) -> None: with self.assertRaises(cst.ParserSyntaxError): m, scopes = get_scope_metadata_provider( From df231f3fa5ac3dfa7b14a30103249e5da79509e3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 16 Dec 2020 20:07:26 +0000 Subject: [PATCH 097/632] bump version to 0.3.16 (#432) --- CHANGELOG.md | 12 ++++++++++++ libcst/_version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ac107a0..52ba005f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +# 0.3.16 - 2020-12-16 + +## Added +- Support PEP-604 style unions in decorator annotations [#429](https://github.com/Instagram/LibCST/pull/429) +- Gathering exports in augmented assignment statements [#426](https://github.com/Instagram/LibCST/pull/426) + +## Fixed +- Don't allow out of order accesses in the global scope [#431](https://github.com/Instagram/LibCST/pull/431) +- Handle scope ordering in For statements [#430](https://github.com/Instagram/LibCST/pull/430) +- Fix for not parsing subscripts such as `cast()["from"]` [#428](https://github.com/Instagram/LibCST/pull/428) +- Walrus operator's left hand side now has STORE expression context [#433](https://github.com/Instagram/LibCST/pull/433) + # 0.3.15 - 2020-12-01 ## Added 
diff --git a/libcst/_version.py b/libcst/_version.py index 08752152..7f61e7b8 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.15" +LIBCST_VERSION: str = "0.3.16" From ac381613ad72ff172c1dd4294d9b36ae501cd855 Mon Sep 17 00:00:00 2001 From: Sebastian Kreft Date: Mon, 4 Jan 2021 11:09:27 -0300 Subject: [PATCH 098/632] optimization: reduce the number of unused parallel processes (#440) Instead of always launching the specified number of jobs or cpu_count, we should take into account how many files need to be processed and how many files are delivered at once to each process. Note that this is similar to what fixit does, although they don't specify a chunk size. --- libcst/codemod/_cli.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 985684c3..edd1c701 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -576,8 +576,12 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 total = len(files) progress = Progress(enabled=not hide_progress, total=total) + chunksize = 4 # Grab number of cores if we need to - jobs: int = jobs if jobs is not None else cpu_count() + jobs: int = min( + jobs if jobs is not None else cpu_count(), + (len(files) + chunksize - 1) // chunksize, + ) if jobs < 1: raise Exception("Must have at least one job to process!") @@ -646,7 +650,9 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 for filename in files ] try: - for result in p.imap_unordered(_execute_transform_wrap, args, chunksize=4): + for result in p.imap_unordered( + _execute_transform_wrap, args, chunksize=chunksize + ): # Print an execution result, keep track of failures _print_parallel_result( result, From c22ed6a4c67e5f16f0b928ca8a0c2944056eb216 Mon Sep 17 00:00:00 2001 From: Maggie Moss Date: Thu, 7 Jan 2021 14:22:50 -0800 Subject: [PATCH 099/632] Fix bug in apply annotations, add test. (#445) --- .../visitors/_apply_type_annotations.py | 16 ++++++++++------ .../tests/test_apply_type_annotations.py | 18 ++++++++++++++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 2090c151..393a560b 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -70,12 +70,16 @@ class TypeCollector(cst.CSTVisitor): def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: self.qualifier.append(node.name.value) returns = node.returns - if returns is not None: - return_annotation = self._create_import_from_annotation(returns) - parameter_annotations = self._import_parameter_annotations(node.params) - self.function_annotations[".".join(self.qualifier)] = FunctionAnnotation( - parameters=parameter_annotations, returns=return_annotation - ) + return_annotation = ( + self._create_import_from_annotation(returns) + if returns is not None + else None + ) + parameter_annotations = self._import_parameter_annotations(node.params) + self.function_annotations[".".join(self.qualifier)] = FunctionAnnotation( + parameters=parameter_annotations, returns=return_annotation + ) + # pyi files don't support inner functions, return False to stop the traversal. 
return False diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 90e7b58c..c37fbc69 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -651,6 +651,24 @@ class TestApplyAnnotationsVisitor(CodemodTest): example: Dict[str, Type[foo.Example]] = { "test": foo() } """, ), + ( + """ + from typing import Optional + + class A: + def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... + """, + """ + class A: + def foo(self, atticus, b = None, c = False): ... + """, + """ + from typing import Optional + + class A: + def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... + """, + ), ) ) def test_annotate_functions(self, stub: str, before: str, after: str) -> None: From f78de1b786a58194783db371f77ced9b2216f13f Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Wed, 27 Jan 2021 10:02:17 +0000 Subject: [PATCH 100/632] Fix context.scratch leaking across files (#453) --- libcst/codemod/_cli.py | 1 + libcst/tests/test_e2e.py | 77 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 libcst/tests/test_e2e.py diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index edd1c701..ed740f24 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -279,6 +279,7 @@ def _execute_transform( # noqa: C901 transformer.context, filename=filename, full_module_name=_calculate_module(config.repo_root, filename), + scratch={}, ) # Run the transform, bail if we failed or if we aren't formatting code diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py new file mode 100644 index 00000000..1e68ed6c --- /dev/null +++ b/libcst/tests/test_e2e.py @@ -0,0 +1,77 @@ +import contextlib +import os +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import Generator +from unittest import TestCase + +from libcst import BaseExpression, Call, Name, matchers as m +from libcst.codemod import ( + CodemodContext, + VisitorBasedCodemodCommand, + gather_files, + parallel_exec_transform_with_prettyprint, +) +from libcst.codemod.visitors import AddImportsVisitor + + +class PrintToPPrintCommand(VisitorBasedCodemodCommand): + def leave_Call(self, original_node: Call, updated_node: Call) -> BaseExpression: + if m.matches(updated_node, m.Call(func=m.Name("print"))): + AddImportsVisitor.add_needed_import( + self.context, + "pprint", + "pprint", + ) + return updated_node.with_changes(func=Name("pprint")) + return super().leave_Call(original_node, updated_node) + + +@contextlib.contextmanager +def temp_workspace() -> Generator[Path, None, None]: + cwd = os.getcwd() + with TemporaryDirectory() as temp_dir: + try: + ws = Path(temp_dir).resolve() + os.chdir(ws) + yield ws + finally: + os.chdir(cwd) + + +class ToolE2ETest(TestCase): + def test_leaky_codemod(self) -> None: + with temp_workspace() as tmp: + # File to trigger codemod + example: Path = tmp / "example.py" + example.write_text("""print("Hello")""") + # File that should not be modified + other = tmp / "other.py" + other.touch() + + # Run command + command_instance = PrintToPPrintCommand(CodemodContext()) + files = gather_files(".") + result = parallel_exec_transform_with_prettyprint( + command_instance, + files, + format_code=False, + hide_progress=True, + ) + + # Check results + self.assertEqual(2, result.successes) + self.assertEqual(0, result.skips) + self.assertEqual(0, result.failures) + # 
Expect example.py to be modified + self.assertIn( + "from pprint import pprint", + example.read_text(), + "import missing in example.py", + ) + # Expect other.py to NOT be modified + self.assertNotIn( + "from pprint import pprint", + other.read_text(), + "import found in other.py", + ) From 546f6289d3041a64a38b05f41ec7eb23728bc291 Mon Sep 17 00:00:00 2001 From: Zac Hatfield-Dodds Date: Wed, 27 Jan 2021 21:14:37 +1100 Subject: [PATCH 101/632] Use dummy pool for jobs=1 (#436) --- libcst/codemod/_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index ed740f24..97fe8a09 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -619,7 +619,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 python_version=python_version, ) - if total == 1: + if total == 1 or jobs == 1: # Simple case, we should not pay for process overhead. # Let's just use a dummy synchronous pool. jobs = 1 From dacafdea8cadf5c4467549ee6d65b446786d59b2 Mon Sep 17 00:00:00 2001 From: Caleb Donovick Date: Mon, 8 Feb 2021 10:51:49 -0800 Subject: [PATCH 102/632] Update README.rst: venv dir must **not** be inside the libcst dir (#454) Just spent a couple hours trying to figure out why I couldn't get pyre to work. Only to figure out that having my venv inside the libcst dir was breaking pyre. --- README.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/README.rst b/README.rst index 6d477eae..a2e999f9 100644 --- a/README.rst +++ b/README.rst @@ -180,6 +180,7 @@ To set up pyre check environment: 1. Copy the example Pyre config: ``cp .pyre_configuration.example .pyre_configuration``. 2. In the config file, add your venv site-packages dir to "search_path". (e.g. add "/workspace/libcst-env/lib/python3.7/site-packages") + Note: venv dir must **not** be inside the libcst dir 3. Remove installed LibCST and install from the source code: .. code-block:: shell From 39ff458270c3790f9c283f83522ee5b0f607163d Mon Sep 17 00:00:00 2001 From: Jimmy Lai Date: Mon, 8 Feb 2021 13:57:30 -0800 Subject: [PATCH 103/632] Bump version to 0.3.17 (#456) --- CHANGELOG.md | 11 +++++++++++ libcst/_version.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 52ba005f..716ce121 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +# 0.3.17 - 2021-02-08 + +## Updated +- Optimization: reduce the number of unused parallel processes [#440](https://github.com/Instagram/LibCST/pull/440) + +## Fixed +- Walrus operator's left hand side now has STORE expression context [#443](https://github.com/Instagram/LibCST/pull/433) +- ApplyTypeAnnotationsVisitor applies parameter annotations even if no return type is declared [#445](https://github.com/Instagram/LibCST/pull/445) +- Work around Windows problem by using dummy pool for `jobs=1` [#436](https://github.com/Instagram/LibCST/pull/436) +- Remove extra unused imports added in other files [#453](https://github.com/Instagram/LibCST/pull/453) + # 0.3.16 - 2020-12-16 ## Added diff --git a/libcst/_version.py b/libcst/_version.py index 7f61e7b8..b6558851 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. 
-LIBCST_VERSION: str = "0.3.16" +LIBCST_VERSION: str = "0.3.17" From 507b453e74e8bdd119d92d63ba84a4aa8f00c347 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 12 Mar 2021 10:10:44 +0000 Subject: [PATCH 104/632] Split QualifiedNameProvider out from libcst.metadata.scope_provider (#464) * Split QualifiedNameProvider out of scope_provider.py * merge test_qualified_name_provider into test_name_provider --- libcst/metadata/__init__.py | 2 +- libcst/metadata/name_provider.py | 78 +++++++++++++++++++ libcst/metadata/scope_provider.py | 68 ---------------- ...name_provider.py => test_name_provider.py} | 44 ++++++++++- 4 files changed, 120 insertions(+), 72 deletions(-) create mode 100644 libcst/metadata/name_provider.py rename libcst/metadata/tests/{test_qualified_name_provider.py => test_name_provider.py} (87%) diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index 2e70e9df..477a631f 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -16,6 +16,7 @@ from libcst.metadata.expression_context_provider import ( ExpressionContextProvider, ) from libcst.metadata.full_repo_manager import FullRepoManager +from libcst.metadata.name_provider import QualifiedNameProvider from libcst.metadata.parent_node_provider import ParentNodeProvider from libcst.metadata.position_provider import ( PositionProvider, @@ -37,7 +38,6 @@ from libcst.metadata.scope_provider import ( FunctionScope, GlobalScope, QualifiedName, - QualifiedNameProvider, QualifiedNameSource, Scope, ScopeProvider, diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py new file mode 100644 index 00000000..5072399b --- /dev/null +++ b/libcst/metadata/name_provider.py @@ -0,0 +1,78 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Collection, Optional, Union + +import libcst as cst +from libcst._metadata_dependent import MetadataDependent +from libcst.metadata.base_provider import BatchableMetadataProvider +from libcst.metadata.scope_provider import QualifiedName, ScopeProvider + + +class QualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): + """ + Compute possible qualified names of a variable CSTNode + (extends `PEP-3155 `_). + It uses the + :func:`~libcst.metadata.Scope.get_qualified_names_for` underlying to get qualified names. + Multiple qualified names may be returned, such as when we have conditional imports or an + import shadows another. E.g., the provider finds ``a.b``, ``d.e`` and + ``f.g`` as possible qualified names of ``c``:: + + >>> wrapper = MetadataWrapper( + >>> cst.parse_module(dedent( + >>> ''' + >>> if something: + >>> from a import b as c + >>> elif otherthing: + >>> from d import e as c + >>> else: + >>> from f import g as c + >>> c() + >>> ''' + >>> )) + >>> ) + >>> call = wrapper.module.body[1].body[0].value + >>> wrapper.resolve(QualifiedNameProvider)[call], + { + QualifiedName(name="a.b", source=QualifiedNameSource.IMPORT), + QualifiedName(name="d.e", source=QualifiedNameSource.IMPORT), + QualifiedName(name="f.g", source=QualifiedNameSource.IMPORT), + } + + For qualified name of a variable in a function or a comprehension, please refer + :func:`~libcst.metadata.Scope.get_qualified_names_for` for more detail. 
+ """ + + METADATA_DEPENDENCIES = (ScopeProvider,) + + def visit_Module(self, node: cst.Module) -> Optional[bool]: + visitor = QualifiedNameVisitor(self) + node.visit(visitor) + + @staticmethod + def has_name( + visitor: MetadataDependent, node: cst.CSTNode, name: Union[str, QualifiedName] + ) -> bool: + """Check if any of qualified name has the str name or :class:`~libcst.metadata.QualifiedName` name.""" + qualified_names = visitor.get_metadata(QualifiedNameProvider, node, set()) + if isinstance(name, str): + return any(qn.name == name for qn in qualified_names) + else: + return any(qn == name for qn in qualified_names) + + +class QualifiedNameVisitor(cst.CSTVisitor): + def __init__(self, provider: "QualifiedNameProvider") -> None: + self.provider: QualifiedNameProvider = provider + + def on_visit(self, node: cst.CSTNode) -> bool: + scope = self.provider.get_metadata(ScopeProvider, node, None) + if scope: + self.provider.set_metadata(node, scope.get_qualified_names_for(node)) + else: + self.provider.set_metadata(node, set()) + super().on_visit(node) + return True diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index b5c6ba68..77ceafd1 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -27,7 +27,6 @@ from typing import ( import libcst as cst from libcst import ensure_type from libcst._add_slots import add_slots -from libcst._metadata_dependent import MetadataDependent from libcst.helpers import get_full_name_for_node from libcst.metadata.base_provider import BatchableMetadataProvider from libcst.metadata.expression_context_provider import ( @@ -1051,70 +1050,3 @@ class ScopeProvider(BatchableMetadataProvider[Optional[Scope]]): visitor = ScopeVisitor(self) node.visit(visitor) visitor.infer_accesses() - - -class QualifiedNameVisitor(cst.CSTVisitor): - def __init__(self, provider: "QualifiedNameProvider") -> None: - self.provider: QualifiedNameProvider = provider - - def on_visit(self, node: cst.CSTNode) -> bool: - scope = self.provider.get_metadata(ScopeProvider, node, None) - if scope: - self.provider.set_metadata(node, scope.get_qualified_names_for(node)) - else: - self.provider.set_metadata(node, set()) - super().on_visit(node) - return True - - -class QualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): - """ - Compute possible qualified names of a variable CSTNode - (extends `PEP-3155 `_). - It uses the - :func:`~libcst.metadata.Scope.get_qualified_names_for` underlying to get qualified names. - Multiple qualified names may be returned, such as when we have conditional imports or an - import shadows another. E.g., the provider finds ``a.b``, ``d.e`` and - ``f.g`` as possible qualified names of ``c``:: - - >>> wrapper = MetadataWrapper( - >>> cst.parse_module(dedent( - >>> ''' - >>> if something: - >>> from a import b as c - >>> elif otherthing: - >>> from d import e as c - >>> else: - >>> from f import g as c - >>> c() - >>> ''' - >>> )) - >>> ) - >>> call = wrapper.module.body[1].body[0].value - >>> wrapper.resolve(QualifiedNameProvider)[call], - { - QualifiedName(name="a.b", source=QualifiedNameSource.IMPORT), - QualifiedName(name="d.e", source=QualifiedNameSource.IMPORT), - QualifiedName(name="f.g", source=QualifiedNameSource.IMPORT), - } - - For qualified name of a variable in a function or a comprehension, please refer - :func:`~libcst.metadata.Scope.get_qualified_names_for` for more detail. 
- """ - - METADATA_DEPENDENCIES = (ScopeProvider,) - - def visit_Module(self, node: cst.Module) -> Optional[bool]: - visitor = QualifiedNameVisitor(self) - node.visit(visitor) - - @staticmethod - def has_name( - visitor: MetadataDependent, node: cst.CSTNode, name: Union[str, QualifiedName] - ) -> bool: - """Check if any of qualified name has the str name or :class:`~libcst.metadata.QualifiedName` name.""" - qualified_names = visitor.get_metadata(QualifiedNameProvider, node, set()) - if isinstance(name, str): - return any(qn.name == name for qn in qualified_names) - else: - return any(qn == name for qn in qualified_names) diff --git a/libcst/metadata/tests/test_qualified_name_provider.py b/libcst/metadata/tests/test_name_provider.py similarity index 87% rename from libcst/metadata/tests/test_qualified_name_provider.py rename to libcst/metadata/tests/test_name_provider.py index cf7fa68b..f995b65a 100644 --- a/libcst/metadata/tests/test_qualified_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -3,9 +3,8 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. - from textwrap import dedent -from typing import Collection, Mapping, Optional, Tuple +from typing import Collection, Mapping, Optional, Set, Tuple import libcst as cst from libcst import ensure_type @@ -25,7 +24,46 @@ def get_qualified_name_metadata_provider( return wrapper.module, wrapper.resolve(QualifiedNameProvider) -class ScopeProviderTest(UnitTest): +def get_qualified_names(module_str: str) -> Set[QualifiedName]: + _, qnames = get_qualified_name_metadata_provider(module_str) + return set().union(*qnames.values()) + + +class QualifiedNameProviderTest(UnitTest): + def test_imports(self) -> None: + qnames = get_qualified_names( + """ + from a.b import c as d + d + """ + ) + self.assertEqual({"a.b.c"}, {qname.name for qname in qnames}) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.IMPORT, msg=f"{qname}") + + def test_builtins(self) -> None: + qnames = get_qualified_names( + """ + int(None) + """ + ) + self.assertEqual( + {"builtins.int", "builtins.None"}, {qname.name for qname in qnames} + ) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.BUILTIN, msg=f"{qname}") + + def test_locals(self) -> None: + qnames = get_qualified_names( + """ + class X: + a: "X" + """ + ) + self.assertEqual({"X", "X.a"}, {qname.name for qname in qnames}) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.LOCAL, msg=f"{qname}") + def test_simple_qualified_names(self) -> None: m, names = get_qualified_name_metadata_provider( """ From 0ee0831eb68644a95138b4e8c210fe5c88baf76d Mon Sep 17 00:00:00 2001 From: Caleb Donovick Date: Mon, 22 Mar 2021 23:23:40 -0700 Subject: [PATCH 105/632] Add FlattenSentinel to support replacing a statement with multiple statements (#455) * Add flatten_sentinal * Add FlattenSentinal to __all__ * Fix lint errors * Fix type errors * Update test to use leave_Return * Update and run codegen * Add empty test * Update docs * autofix --- docs/source/visitors.rst | 1 + libcst/__init__.py | 2 + libcst/_flatten_sentinel.py | 46 +++++++ libcst/_nodes/base.py | 23 ++-- libcst/_nodes/internal.py | 38 +++++- libcst/_nodes/tests/test_flatten_behavior.py | 79 +++++++++++ libcst/_typed_visitor.py | 133 +++++++++++++------ libcst/_types.py | 1 + libcst/_visitors.py | 3 +- libcst/codegen/gen_visitor_functions.py | 12 +- libcst/matchers/_matcher_base.py | 8 +- 11 files changed, 281 
insertions(+), 65 deletions(-) create mode 100644 libcst/_flatten_sentinel.py create mode 100644 libcst/_nodes/tests/test_flatten_behavior.py diff --git a/docs/source/visitors.rst b/docs/source/visitors.rst index a2b9ee90..722959e1 100644 --- a/docs/source/visitors.rst +++ b/docs/source/visitors.rst @@ -7,6 +7,7 @@ Visitors .. autoclass:: libcst.CSTTransformer .. autofunction:: libcst.RemoveFromParent .. autoclass:: libcst.RemovalSentinel +.. autoclass:: libcst.FlattenSentinel Visit and Leave Helper Functions -------------------------------- diff --git a/libcst/__init__.py b/libcst/__init__.py index 39b0f6dc..cc71ce2a 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -5,6 +5,7 @@ from libcst._batched_visitor import BatchableCSTVisitor, visit_batched from libcst._exceptions import MetadataException, ParserSyntaxError +from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel from libcst._metadata_dependent import MetadataDependent from libcst._nodes.base import CSTNode, CSTValidationError @@ -211,6 +212,7 @@ __all__ = [ "CSTValidationError", "CSTVisitor", "CSTVisitorT", + "FlattenSentinel", "MaybeSentinel", "MetadataException", "ParserSyntaxError", diff --git a/libcst/_flatten_sentinel.py b/libcst/_flatten_sentinel.py new file mode 100644 index 00000000..18148077 --- /dev/null +++ b/libcst/_flatten_sentinel.py @@ -0,0 +1,46 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import sys + + +# PEP 585 +if sys.version_info < (3, 9): + from typing import Iterable, Sequence +else: + from collections.abc import Iterable, Sequence + +from libcst._types import CSTNodeT_co + + +class FlattenSentinel(Sequence[CSTNodeT_co]): + """ + A :class:`FlattenSentinel` may be returned by a :meth:`CSTTransformer.on_leave` + method when one wants to replace a node with multiple nodes. The replaced + node must be contained in a `Sequence` attribute such as + :attr:`~libcst.Module.body`. This is generally the case for + :class:`~libcst.BaseStatement` and :class:`~libcst.BaseSmallStatement`. + For example to insert a print before every return:: + + def leave_Return( + self, original_node: cst.Return, updated_node: cst.Return + ) -> Union[cst.Return, cst.RemovalSentinel, cst.FlattenSentinel[cst.BaseSmallStatement]]: + log_stmt = cst.Expr(cst.parse_expression("print('returning')")) + return cst.FlattenSentinel([log_stmt, updated_node]) + + Returning an empty :class:`FlattenSentinel` is equivalent to returning + :attr:`cst.RemovalSentinel.REMOVE` and is subject to its requirements. 
+ """ + + nodes: Sequence[CSTNodeT_co] + + def __init__(self, nodes: Iterable[CSTNodeT_co]) -> None: + self.nodes = tuple(nodes) + + def __getitem__(self, idx: int) -> CSTNodeT_co: + return self.nodes[idx] + + def __len__(self) -> int: + return len(self.nodes) diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index fe2988c9..47bf26ea 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -8,6 +8,7 @@ from copy import deepcopy from dataclasses import dataclass, field, fields, replace from typing import Any, Dict, List, Mapping, Sequence, TypeVar, Union, cast +from libcst._flatten_sentinel import FlattenSentinel from libcst._nodes.internal import CodegenState from libcst._removal_sentinel import RemovalSentinel from libcst._type_enforce import is_value_of_type @@ -207,7 +208,7 @@ class CSTNode(ABC): def visit( self: _CSTNodeSelfT, visitor: CSTVisitorT - ) -> Union[_CSTNodeSelfT, RemovalSentinel]: + ) -> Union[_CSTNodeSelfT, RemovalSentinel, FlattenSentinel[_CSTNodeSelfT]]: """ Visits the current node, its children, and all transitive children using the given visitor's callbacks. @@ -234,7 +235,7 @@ class CSTNode(ABC): leave_result = visitor.on_leave(self, with_updated_children) # validate return type of the user-defined `visitor.on_leave` method - if not isinstance(leave_result, (CSTNode, RemovalSentinel)): + if not isinstance(leave_result, (CSTNode, RemovalSentinel, FlattenSentinel)): raise Exception( "Expected a node of type CSTNode or a RemovalSentinel, " + f"but got a return value of {type(leave_result).__name__}" @@ -379,9 +380,9 @@ class CSTNode(ABC): child, all instances will be replaced. """ new_tree = self.visit(_ChildReplacementTransformer(old_node, new_node)) - if isinstance(new_tree, RemovalSentinel): - # The above transform never returns RemovalSentinel, so this isn't possible - raise Exception("Logic error, cannot get a RemovalSentinel here!") + if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): + # The above transform never returns *Sentinel, so this isn't possible + raise Exception("Logic error, cannot get a *Sentinal here!") return new_tree def deep_remove( @@ -392,10 +393,16 @@ class CSTNode(ABC): have previously modified the tree in a way that ``old_node`` appears more than once as a deep child, all instances will be removed. """ - return self.visit( + new_tree = self.visit( _ChildReplacementTransformer(old_node, RemovalSentinel.REMOVE) ) + if isinstance(new_tree, FlattenSentinel): + # The above transform never returns FlattenSentinel, so this isn't possible + raise Exception("Logic error, cannot get a FlattenSentinel here!") + + return new_tree + def with_deep_changes( self: _CSTNodeSelfT, old_node: "CSTNode", **changes: Any ) -> _CSTNodeSelfT: @@ -412,9 +419,9 @@ class CSTNode(ABC): similar API in the future. """ new_tree = self.visit(_ChildWithChangesTransformer(old_node, changes)) - if isinstance(new_tree, RemovalSentinel): + if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): # This is impossible with the above transform. 
- raise Exception("Logic error, cannot get a RemovalSentinel here!") + raise Exception("Logic error, cannot get a *Sentinel here!") return new_tree def __eq__(self: _CSTNodeSelfT, other: _CSTNodeSelfT) -> bool: diff --git a/libcst/_nodes/internal.py b/libcst/_nodes/internal.py index 4b5c7b00..5bbefc01 100644 --- a/libcst/_nodes/internal.py +++ b/libcst/_nodes/internal.py @@ -9,6 +9,7 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Sequence, Union from libcst._add_slots import add_slots +from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel from libcst._removal_sentinel import RemovalSentinel from libcst._types import CSTNodeT @@ -84,6 +85,13 @@ def visit_required( f"We got a RemovalSentinel while visiting a {type(node).__name__}. This " + "node's parent does not allow it to be removed." ) + elif isinstance(result, FlattenSentinel): + raise TypeError( + f"We got a FlattenSentinel while visiting a {type(node).__name__}. This " + + "node's parent does not allow for it to be it to be replaced with a " + + "sequence." + ) + visitor.on_leave_attribute(parent, fieldname) return result @@ -101,6 +109,12 @@ def visit_optional( return None visitor.on_visit_attribute(parent, fieldname) result = node.visit(visitor) + if isinstance(result, FlattenSentinel): + raise TypeError( + f"We got a FlattenSentinel while visiting a {type(node).__name__}. This " + + "node's parent does not allow for it to be it to be replaced with a " + + "sequence." + ) visitor.on_leave_attribute(parent, fieldname) return None if isinstance(result, RemovalSentinel) else result @@ -121,6 +135,12 @@ def visit_sentinel( return MaybeSentinel.DEFAULT visitor.on_visit_attribute(parent, fieldname) result = node.visit(visitor) + if isinstance(result, FlattenSentinel): + raise TypeError( + f"We got a FlattenSentinel while visiting a {type(node).__name__}. This " + + "node's parent does not allow for it to be it to be replaced with a " + + "sequence." + ) visitor.on_leave_attribute(parent, fieldname) return MaybeSentinel.DEFAULT if isinstance(result, RemovalSentinel) else result @@ -138,7 +158,9 @@ def visit_iterable( visitor.on_visit_attribute(parent, fieldname) for child in children: new_child = child.visit(visitor) - if not isinstance(new_child, RemovalSentinel): + if isinstance(new_child, FlattenSentinel): + yield from new_child + elif not isinstance(new_child, RemovalSentinel): yield new_child visitor.on_leave_attribute(parent, fieldname) @@ -179,11 +201,17 @@ def visit_body_iterable( # and the new child is. This means a RemovalSentinel # caused a child of this node to be dropped, and it # is now useless. - if (not child._is_removable()) and new_child._is_removable(): - continue - # Safe to yield child in this case. - yield new_child + if isinstance(new_child, FlattenSentinel): + for child_ in new_child: + if (not child._is_removable()) and child_._is_removable(): + continue + yield child_ + else: + if (not child._is_removable()) and new_child._is_removable(): + continue + # Safe to yield child in this case. + yield new_child visitor.on_leave_attribute(parent, fieldname) diff --git a/libcst/_nodes/tests/test_flatten_behavior.py b/libcst/_nodes/tests/test_flatten_behavior.py new file mode 100644 index 00000000..5f37067c --- /dev/null +++ b/libcst/_nodes/tests/test_flatten_behavior.py @@ -0,0 +1,79 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Type, Union + +import libcst as cst +from libcst import FlattenSentinel, RemovalSentinel, parse_expression, parse_module +from libcst._nodes.tests.base import CSTNodeTest +from libcst._types import CSTNodeT +from libcst._visitors import CSTTransformer +from libcst.testing.utils import data_provider + + +class InsertPrintBeforeReturn(CSTTransformer): + def leave_Return( + self, original_node: cst.Return, updated_node: cst.Return + ) -> Union[cst.Return, RemovalSentinel, FlattenSentinel[cst.BaseSmallStatement]]: + return FlattenSentinel( + [ + cst.Expr(parse_expression("print('returning')")), + updated_node, + ] + ) + + +class FlattenLines(CSTTransformer): + def on_leave( + self, original_node: CSTNodeT, updated_node: CSTNodeT + ) -> Union[CSTNodeT, RemovalSentinel, FlattenSentinel[cst.SimpleStatementLine]]: + if isinstance(updated_node, cst.SimpleStatementLine): + return FlattenSentinel( + [ + cst.SimpleStatementLine( + [stmt.with_changes(semicolon=cst.MaybeSentinel.DEFAULT)] + ) + for stmt in updated_node.body + ] + ) + else: + return updated_node + + +class RemoveReturnWithEmpty(CSTTransformer): + def leave_Return( + self, original_node: cst.Return, updated_node: cst.Return + ) -> Union[cst.Return, RemovalSentinel, FlattenSentinel[cst.BaseSmallStatement]]: + return FlattenSentinel([]) + + +class FlattenBehavior(CSTNodeTest): + @data_provider( + ( + ("return", "print('returning'); return", InsertPrintBeforeReturn), + ( + "print('returning'); return", + "print('returning')\nreturn", + FlattenLines, + ), + ( + "print('returning')\nreturn", + "print('returning')", + RemoveReturnWithEmpty, + ), + ) + ) + def test_flatten_pass_behavior( + self, before: str, after: str, visitor: Type[CSTTransformer] + ) -> None: + # Test doesn't have newline termination case + before_module = parse_module(before) + after_module = before_module.visit(visitor()) + self.assertEqual(after, after_module.code) + + # Test does have newline termination case + before_module = parse_module(before + "\n") + after_module = before_module.visit(visitor()) + self.assertEqual(after + "\n", after_module.code) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index bbc10d55..0246c718 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -7,6 +7,7 @@ # This file was generated by libcst.codegen.gen_matcher_classes from typing import TYPE_CHECKING, Optional, Union +from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel from libcst._removal_sentinel import RemovalSentinel from libcst._typed_visitor_base import mark_no_op @@ -5284,7 +5285,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_AnnAssign( self, original_node: "AnnAssign", updated_node: "AnnAssign" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5296,7 +5299,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Arg( self, original_node: "Arg", updated_node: "Arg" - ) -> Union["Arg", RemovalSentinel]: + ) -> Union["Arg", FlattenSentinel["Arg"], RemovalSentinel]: return updated_node @mark_no_op @@ -5306,13 +5309,17 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Assert( self, original_node: "Assert", 
updated_node: "Assert" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op def leave_Assign( self, original_node: "Assign", updated_node: "Assign" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5324,7 +5331,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_AssignTarget( self, original_node: "AssignTarget", updated_node: "AssignTarget" - ) -> Union["AssignTarget", RemovalSentinel]: + ) -> Union["AssignTarget", FlattenSentinel["AssignTarget"], RemovalSentinel]: return updated_node @mark_no_op @@ -5342,7 +5349,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_AugAssign( self, original_node: "AugAssign", updated_node: "AugAssign" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5408,7 +5417,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Break( self, original_node: "Break", updated_node: "Break" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5420,7 +5431,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_ClassDef( self, original_node: "ClassDef", updated_node: "ClassDef" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op @@ -5460,7 +5471,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_ComparisonTarget( self, original_node: "ComparisonTarget", updated_node: "ComparisonTarget" - ) -> Union["ComparisonTarget", RemovalSentinel]: + ) -> Union[ + "ComparisonTarget", FlattenSentinel["ComparisonTarget"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5472,19 +5485,23 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Continue( self, original_node: "Continue", updated_node: "Continue" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op def leave_Decorator( self, original_node: "Decorator", updated_node: "Decorator" - ) -> Union["Decorator", RemovalSentinel]: + ) -> Union["Decorator", FlattenSentinel["Decorator"], RemovalSentinel]: return updated_node @mark_no_op def leave_Del( self, original_node: "Del", updated_node: "Del" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5502,7 +5519,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_DictElement( self, original_node: "DictElement", updated_node: "DictElement" - ) -> Union["BaseDictElement", RemovalSentinel]: + ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: return updated_node @mark_no_op @@ -5520,13 +5537,13 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Dot( self, original_node: "Dot", updated_node: "Dot" - ) 
-> Union["Dot", RemovalSentinel]: + ) -> Union["Dot", FlattenSentinel["Dot"], RemovalSentinel]: return updated_node @mark_no_op def leave_Element( self, original_node: "Element", updated_node: "Element" - ) -> Union["BaseElement", RemovalSentinel]: + ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: return updated_node @mark_no_op @@ -5542,7 +5559,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_EmptyLine( self, original_node: "EmptyLine", updated_node: "EmptyLine" - ) -> Union["EmptyLine", RemovalSentinel]: + ) -> Union["EmptyLine", FlattenSentinel["EmptyLine"], RemovalSentinel]: return updated_node @mark_no_op @@ -5554,13 +5571,15 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_ExceptHandler( self, original_node: "ExceptHandler", updated_node: "ExceptHandler" - ) -> Union["ExceptHandler", RemovalSentinel]: + ) -> Union["ExceptHandler", FlattenSentinel["ExceptHandler"], RemovalSentinel]: return updated_node @mark_no_op def leave_Expr( self, original_node: "Expr", updated_node: "Expr" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5590,7 +5609,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_For( self, original_node: "For", updated_node: "For" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op @@ -5604,13 +5623,21 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): self, original_node: "FormattedStringExpression", updated_node: "FormattedStringExpression", - ) -> Union["BaseFormattedStringContent", RemovalSentinel]: + ) -> Union[ + "BaseFormattedStringContent", + FlattenSentinel["BaseFormattedStringContent"], + RemovalSentinel, + ]: return updated_node @mark_no_op def leave_FormattedStringText( self, original_node: "FormattedStringText", updated_node: "FormattedStringText" - ) -> Union["BaseFormattedStringContent", RemovalSentinel]: + ) -> Union[ + "BaseFormattedStringContent", + FlattenSentinel["BaseFormattedStringContent"], + RemovalSentinel, + ]: return updated_node @mark_no_op @@ -5620,7 +5647,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_FunctionDef( self, original_node: "FunctionDef", updated_node: "FunctionDef" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op @@ -5632,7 +5659,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Global( self, original_node: "Global", updated_node: "Global" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5650,7 +5679,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_If( self, original_node: "If", updated_node: "If" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op @@ -5668,19 +5697,23 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Import( self, original_node: "Import", updated_node: "Import" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + 
"BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op def leave_ImportAlias( self, original_node: "ImportAlias", updated_node: "ImportAlias" - ) -> Union["ImportAlias", RemovalSentinel]: + ) -> Union["ImportAlias", FlattenSentinel["ImportAlias"], RemovalSentinel]: return updated_node @mark_no_op def leave_ImportFrom( self, original_node: "ImportFrom", updated_node: "ImportFrom" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5734,7 +5767,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_LeftParen( self, original_node: "LeftParen", updated_node: "LeftParen" - ) -> Union["LeftParen", MaybeSentinel, RemovalSentinel]: + ) -> Union[ + "LeftParen", MaybeSentinel, FlattenSentinel["LeftParen"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5836,7 +5871,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_NameItem( self, original_node: "NameItem", updated_node: "NameItem" - ) -> Union["NameItem", RemovalSentinel]: + ) -> Union["NameItem", FlattenSentinel["NameItem"], RemovalSentinel]: return updated_node @mark_no_op @@ -5854,7 +5889,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Nonlocal( self, original_node: "Nonlocal", updated_node: "Nonlocal" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5880,7 +5917,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Param( self, original_node: "Param", updated_node: "Param" - ) -> Union["Param", MaybeSentinel, RemovalSentinel]: + ) -> Union["Param", MaybeSentinel, FlattenSentinel["Param"], RemovalSentinel]: return updated_node @mark_no_op @@ -5912,7 +5949,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Pass( self, original_node: "Pass", updated_node: "Pass" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5934,13 +5973,17 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Raise( self, original_node: "Raise", updated_node: "Raise" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op def leave_Return( self, original_node: "Return", updated_node: "Return" - ) -> Union["BaseSmallStatement", RemovalSentinel]: + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5952,7 +5995,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_RightParen( self, original_node: "RightParen", updated_node: "RightParen" - ) -> Union["RightParen", MaybeSentinel, RemovalSentinel]: + ) -> Union[ + "RightParen", MaybeSentinel, FlattenSentinel["RightParen"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -5992,7 +6037,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_SimpleStatementLine( self, original_node: "SimpleStatementLine", updated_node: "SimpleStatementLine" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> 
Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op @@ -6022,13 +6067,13 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_StarredDictElement( self, original_node: "StarredDictElement", updated_node: "StarredDictElement" - ) -> Union["BaseDictElement", RemovalSentinel]: + ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: return updated_node @mark_no_op def leave_StarredElement( self, original_node: "StarredElement", updated_node: "StarredElement" - ) -> Union["BaseElement", RemovalSentinel]: + ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: return updated_node @mark_no_op @@ -6040,7 +6085,9 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_SubscriptElement( self, original_node: "SubscriptElement", updated_node: "SubscriptElement" - ) -> Union["SubscriptElement", RemovalSentinel]: + ) -> Union[ + "SubscriptElement", FlattenSentinel["SubscriptElement"], RemovalSentinel + ]: return updated_node @mark_no_op @@ -6064,7 +6111,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_Try( self, original_node: "Try", updated_node: "Try" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op @@ -6082,19 +6129,19 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_While( self, original_node: "While", updated_node: "While" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op def leave_With( self, original_node: "With", updated_node: "With" - ) -> Union["BaseStatement", RemovalSentinel]: + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node @mark_no_op def leave_WithItem( self, original_node: "WithItem", updated_node: "WithItem" - ) -> Union["WithItem", RemovalSentinel]: + ) -> Union["WithItem", FlattenSentinel["WithItem"], RemovalSentinel]: return updated_node @mark_no_op diff --git a/libcst/_types.py b/libcst/_types.py index 98342da8..b6b2ea9c 100644 --- a/libcst/_types.py +++ b/libcst/_types.py @@ -12,3 +12,4 @@ if TYPE_CHECKING: CSTNodeT = TypeVar("CSTNodeT", bound="CSTNode") +CSTNodeT_co = TypeVar("CSTNodeT_co", bound="CSTNode", covariant=True) diff --git a/libcst/_visitors.py b/libcst/_visitors.py index 1d710ff2..8da37dbf 100644 --- a/libcst/_visitors.py +++ b/libcst/_visitors.py @@ -5,6 +5,7 @@ from typing import TYPE_CHECKING, Union +from libcst._flatten_sentinel import FlattenSentinel from libcst._metadata_dependent import MetadataDependent from libcst._removal_sentinel import RemovalSentinel from libcst._typed_visitor import CSTTypedTransformerFunctions, CSTTypedVisitorFunctions @@ -49,7 +50,7 @@ class CSTTransformer(CSTTypedTransformerFunctions, MetadataDependent): def on_leave( self, original_node: CSTNodeT, updated_node: CSTNodeT - ) -> Union[CSTNodeT, RemovalSentinel]: + ) -> Union[CSTNodeT, RemovalSentinel, FlattenSentinel[CSTNodeT]]: """ Called every time we leave a node, after we've visited its children. 
If the :func:`~libcst.CSTTransformer.on_visit` function for this node returns diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index d9a9401b..0666691b 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -21,6 +21,7 @@ generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from typing import Optional, Union, TYPE_CHECKING") generated_code.append("") +generated_code.append("from libcst._flatten_sentinel import FlattenSentinel") generated_code.append("from libcst._maybe_sentinel import MaybeSentinel") generated_code.append("from libcst._removal_sentinel import RemovalSentinel") generated_code.append("from libcst._typed_visitor_base import mark_no_op") @@ -99,12 +100,11 @@ for node in sorted(nodebases.keys(), key=lambda node: node.__name__): base_uses = nodeuses[nodebases[node]] if node_uses.maybe or base_uses.maybe: valid_return_types.append("MaybeSentinel") - if ( - node_uses.optional - or node_uses.sequence - or base_uses.optional - or base_uses.sequence - ): + + if node_uses.sequence or base_uses.sequence: + valid_return_types.append(f'FlattenSentinel["{nodebases[node].__name__}"]') + valid_return_types.append("RemovalSentinel") + elif node_uses.optional or base_uses.optional: valid_return_types.append("RemovalSentinel") generated_code.append( diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 70a9340a..149904e3 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -30,7 +30,7 @@ from typing import ( import libcst import libcst.metadata as meta -from libcst import MaybeSentinel, RemovalSentinel +from libcst import FlattenSentinel, MaybeSentinel, RemovalSentinel class DoNotCareSentinel(Enum): @@ -1944,4 +1944,8 @@ def replace( fetcher = _construct_metadata_fetcher_dependent(metadata_resolver) replacer = _ReplaceTransformer(matcher, fetcher, replacement) - return tree.visit(replacer) + new_tree = tree.visit(replacer) + if isinstance(new_tree, FlattenSentinel): + # The above transform never returns FlattenSentinel, so this isn't possible + raise Exception("Logic error, cannot get a FlattenSentinel here!") + return new_tree From 4ab866e40f3253561a29c8306e0c5d371b5015b6 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 24 Mar 2021 19:12:56 +0000 Subject: [PATCH 106/632] Add BuiltinScope (#469) * Add more builtin tests * Introduce a BuiltinScope that sits above GlobalScope * Address comments and fix lint * Updating documentation * Update scope illustration * Add BuiltinScope to libcst.metadata * Lazily create builtin assignments, update scope diagram --- docs/source/_static/img/python_scopes.png | Bin 94240 -> 108465 bytes docs/source/_static/img/python_scopes.svg | 700 ++++++++++++------- docs/source/metadata.rst | 12 +- libcst/metadata/__init__.py | 2 + libcst/metadata/scope_provider.py | 59 +- libcst/metadata/tests/test_scope_provider.py | 74 ++ 6 files changed, 584 insertions(+), 263 deletions(-) diff --git a/docs/source/_static/img/python_scopes.png b/docs/source/_static/img/python_scopes.png index d5ad158f11f7eac6f8d7a1bbbf9868f28f1f7e5e..0c1b0266eba03ac8aa03c04dc16669326fb8a2fa 100644 GIT binary patch literal 108465 zcmdpeWl&sQv}NP&7TjGEG`JHWSb{r2gS)#s2@oJSBuH>~hv4q+?$$WXT)ywUsi~={ zntAhMe$1`v;#Sjr``mN(*|OH!H$qua1`U}A83Y2M$$pUh1OmYVr!e0U;ekKa{^Li$ zA0+z^T23Gksq^a#irSH(1bCCsSxVFSvz?iguUk 
From 4ab866e40f3253561a29c8306e0c5d371b5015b6 Mon Sep 17 00:00:00 2001
From: Luke Petre
Date: Wed, 24 Mar 2021 19:12:56 +0000
Subject: [PATCH 106/632] Add BuiltinScope (#469)

* Add more builtin tests
* Introduce a BuiltinScope that sits above GlobalScope
* Address comments and fix lint
* Updating documentation
* Update scope illustration
* Add BuiltinScope to libcst.metadata
* Lazily create builtin assignments, update scope diagram
---
 docs/source/_static/img/python_scopes.png    | Bin 94240 -> 108465 bytes
 docs/source/_static/img/python_scopes.svg    | 700 ++++++++++++-------
 docs/source/metadata.rst                     |  12 +-
 libcst/metadata/__init__.py                  |   2 +
 libcst/metadata/scope_provider.py            |  59 +-
 libcst/metadata/tests/test_scope_provider.py |  74 ++
 6 files changed, 584 insertions(+), 263 deletions(-)

diff --git a/docs/source/_static/img/python_scopes.png b/docs/source/_static/img/python_scopes.png
index d5ad158f11f7eac6f8d7a1bbbf9868f28f1f7e5e..0c1b0266eba03ac8aa03c04dc16669326fb8a2fa 100644
GIT binary patch
literal 108465
zC5D#9wh7}R{%qSpj&E=R(o+im=AgHv0OhTCehybSC#TiV9TBnyCnfmgnEDoPa;%W+$U65u1GUMW zZV?b@wX-}hja-gyxylHWj~BJzT*o18OWx_F{9-%UfM4qI)6#>Bv_20T{1Y2yZh1Q# z`7@%%TuPAIO-^++56Ii9w%(6X(CC4f1RpXtMt)Xdc3b6;_T`F8#0wJqv$2M00DAkI z#TG>z7v&ManyFzLlr35!KYc@_sFtcbll=$0IDE6W;(gY}Cd+c^=A8qujq=F>`>f4@ zZ6;nNV26m)rTet0CiFBpX^Lb3O^Yx z|3TLe$;6jR2Nbxt4LoWGDcPq&g7S{_b$Kp+{Se0bv;PeU&kW{qEajB{ZTUU-W`4T= zLWt6*GE3KEG$lyQf0Le<{=I!L*q%`>sbZ1+lc+SMN8zGtO6@VIJL}kFG}wY3#7liv zm8EMjks|%|d(7w<82<%+tPxpGM|@@3Bm;5wlv` z=uX@nP^(GN=slJ>ZvSZ_V*mwMOXnA}&R!r4=Vj``zTysHB3y**3(%-RUq!2V)>a@*_;J9B1Mda9+&&4j(k=C+4BBM?Y zAeW?{m{pmd-wVZdGoS;9Mr*1j=UJ~ zP%B-4jM}I_ks@p$LaJ;?sT>0?Uz^-@VM6; zcYw}N_1f@Gg@0~Vn(GhF@Nbv3>rU=>@ex7zvYY=X_3JwjLsDd?;5I8H!Q0!7eJiKq z-5B44oCn zlz1o;qi9zCh>-^qoU`I5u@&;7 zmAvg-l^51or$Rr(S~nmi@2S1DT%BR<$hrtH>Nb z3-}9f3$zEKk|w`k)EBd%Vr5of@6{*OxvD!n0EQeHrjVm_@ew&^*KPAKmQ5n z)HdDsS3jyjUbd#TRz(9rGxTl}yz_lxD$D(q@*%1EtiM(5^W?4Qi&H|3-H$HIDDlrj5N)c6NpV^~_$?Uu5Aeb&WL;cq|>Z|3LYmt}Ye#PzP z>D@s=yqmlI%I1`Zwe$k`R6rJhG4e=cZq#2*s{8e6Fu>9Su+)6Q1tD8%TQYkK2SRM* z_o~#~)zsTe3=5Y6TO_#WYvv;CpXF`4Vpp9Pf<4&3)oym?9*Ch!h zw!eZ}cebywmS*mD+n5^CEk3$HAgCO+ZZn{G7+_MGS;ssejlwkeLHyq(L#|%5tIzmN9bY^5!j$&No5?9B%D8EOCuD z#{n>9vbJ~+wMyYSrMJf_@wx6G2351p`&j61`cgagB|Wl;^{iIE2FHEGvu4b9)IL<} zU1miJY(41WWEL()Mxd8Pi+`QmSA13x}QbkclyJ`&nZyJD{?Mldy)mgP7x1 z+76&A%cFN6S?pfA+K zF{Hfm6(T@$=oY&K2|BlRIj02ZMKt$C)TaO^Sp0md2D1gohG@2zoQkat+Mt=e5C$tD zuZ47=%OBKJPbHpJ=BKsu;$FgzBLJozpzIgkJ6SX{f;tURyq%b*Dm!GrgHuL^cCGCL zqGzM#6&)Rz&CXAfgVO->RW9OfVDe<@Z%6$tiWXJ#JY~YDQr%nJv6w%8o6x6qS9DDp zPRhPB?vjrc;~x=g>p)jA z`Np4WH(7*<<2`QQQpJ-o0WNSzDI0a64i7;tBnt$?3;Kkkw3bJVlwLjCI+g+m4b6%* zd;#u%T!AiL*@dya(f5(y*teJ=Obn*RG`&ClR+3S%O)g?#l1-KS@R-jcjbJ0z`d+u0+K_CgH{#0H`0Xo0rOWi4RU&TT#h)_oq+H!HTe!It%7(r&(72_zIK z(l#*v72--dx|*?{sn8y^G=E!T&suS##s((>8<_&yA^-CbO+1ski z>=DJrhN)b#_u)&TgqM})L7h%V0pY)GVlTQmkD3jJ$e!gb)aQL#ps76EV>zlv50g!O z0&KR7wU<-(hU$etVH*I&Sf6va;i*<3=XXKRl^EmgteejCLI%%vJPE&-3n<6I93~7|7={AO!1>+WXmMrZhkAzOY3UD2o;IWt6PREaMjxXp+of&7nUV`5;f|pG zbp&+VdccIb{P61}sqU0#n%h5O?jf1#f5}J{#t4ey9N>B0MGtK-g$FBV^>fHO>rgVu zh7i${1qVwcO|)o;ab?g?ztwU;c~ZF$6T{r+%*jsQg>r92_Bc4dttHR1q=87bi6`ve zyU~Hf*plcH$tz;|5#7VNfX#a-ueVuX-{lQ0WqO+%*Zo2bT4b$@+K2ZMQR5754zoCu zxh*ZdE}n;CA>@)sN~IY)pfYUd5PYh>-gNipXOqh<@70_#|5r4xo5I|ab=3~)OQ7oe zh<#=M?7}|d9_VQKFXzAxC)Q)%3#GoxO7h%LtZLVes=v=5j((_Eaa!h_D?l`iXwNbA z6gXaQ*+c(gPA~50nI&QO5Zv%+PBJOwOYW2E-Vn0P8sy$1v+XAgEaUXf>g!&}#LV$B z15^-0bqz&ZiR7Aawdo={r_U}isIxa4aoSP6vr&DqXres=bG1oh5%-6(?oI#9-kyE? 
zkiF@K^B{<311$d2L-rRxt)`zqR$b1%)VOwB>jLGRt$%y;ThH%8sHneB(~gWnOkty) zJVS!O=~j;N@>WjS(3K@J(zLZ$$3RGl>%Fv2a{y&gcM?A-ooGZi$*fL3uF~MiRm+PL z2&{r(nX|1f;58<<|B1Wb?c4~(pBC3_sOm!}R>Fb8uE{xaSLE+W`a3hLEU>Az=F)iw z53g-mKy>b!G;nfjYRJ`|)74X)xZN#NNF1KoHkoLgPGSS@toXfPfY-X=)&FSky@Q(W zx_05%ZzWS838wLt}v76 zwSfBlmY@Q)-+S(C?EWsN-8L~C-7MZ~E$y(5rV~R;!80%^=WCA$csj^#izr8Wnz0%3 zsm{rIpCzw&H1D^`q1j26zS1z6I}VG?jWHh}3b3D#g3{BfuDoyJ7S4SF`4&?9ASv&v zEYnySS0{%Q1YT3Fa?t8lu_6_yg&?1BsK}NHmx)Ub?wTiu1Sw)W|FaeVn-h+lWPCKE z%s+|EqZW+6_$ph=6zig8zhvKS2g<=S`t%9Slwx`4ZO@epNpg9-4~`$QkYH z6Ma{A>U^egldL$P;=60r*q$4A2Dha@a#HQYWA*cgE&L(JM|F(tcCG^DwV5z8YB^ie znJsu`&=mqvm@McycKmoF$t%AR>Gbp~uNxc1d6NICk=C`KvyG&5Psc1vMD}>N+?j|Q zcO-r|`M&kb`ty33$5OHK#W;ER*X^5?;f=nHq^UR?g`WqriL40H6fyQ+wdj#5pn_96 zj2m3hN2$8fsMoG#IPoE?$8*QZ}zE!{kK@iMz^CF29C>YBfG@Vc0s!$Y$iDutHUww#{)oubrI z?4w~&`mNQ5P7zpK`#em->qF5EET>cDth-qhT}a87t)&xH0h_<1XQgc8FT7pm2`euz zhhg7cyNUVOv#PG(Q$9GvL#@Wf14gjN&i zgsV>YYM2aI%NL;DgLXL6q94@UUlbc~^HG3~YUa>us0T^V_?z#erDKikDl?0-p2Yin zwil6o$N~BMS)BFAcf9xsz}(E|MA!oPI(@8>{-FQYiMGkgJ;C>nsiG002d-u2GwkVT zzrl5CXEIJxSEkJT!7nc_lrTPcM*MB`#}WyT^QZpEX)g~pQ~1Ej+4zQy5~s^qnO1ey zVfyPiY#mi~!Kc3vR4zZH+t9J7S!E;9Nw#^e7MwFcnLWUAYQm15?-l~eaDQ1mi?~ua zk+3#QhIbg!Eca#KFXB$BXy@m@uz^Dyi0K_w>jOg#@~LUoxSV+s)k0(&F`a{K?aCo-y`MiFZSA zObUG$I z<4<^d=fG+r#)Fm}?tPk{AH_Wqt^2%n6!pcU>-?_s_f6)A<yovv0k$G=b`g4 zI<~In$ik-)MWO+=u%F5uKT8~)*6L#}Y1LBX%=iD=05f`zM$^-=%Dpap>Xb*`wSd!x zP;#I?5&=(ZnXP6SAmz>E#6Co$ndltXU9o@eA0yvONVd6Fcj3|kJ(k~r#`ehEbMDPKufnwFN@!ypG+`q2(DZ`N zPq)=m3{A)X`Jfi5xp3@~+Vu-jpC@>?$lu#<+C2UmbK~sRVd1Rli2u&W{8!aCBV8l2 z95r4mX=UaD<;GX)@E_vPllj~Zm3Gr>!*|b6$nxe^6ODb1P)f4z+p!L7CAhU_ir3F( zhxUoL72gpiqhjZWqASgXqg$)aej2bhQ1grXds(lO`GDxik)1l}^DykqiMCZ1uuS;g z#r#@Xx$T2b4xUX5_xP$UUtw zzmdmK`HTqEm~5?u6pC#q*HWO4_;XcFwW~J=S~6O^92li5>cP)tws-2(=GKN`<8_3m z{fPcJ4x>FJ(b46{2}>bEM;9t9Q@axuuAwHYVJ%U*vC;A9)5pm6*JjXkIvV}i8d(SM zy7m>z-e2|1;|-x#59@W1l}!r^tuL&s=JRzPr)6+H5U+7zq+K4@nMh%;aJVWf1qCmx zD2)$Rv4g5d`FVZl{X4{78mk26y#NIF9?6eoe`8aSl#=Z_NW2ERQ}p^Ip_*{&!%J_(Wx5qAFEH7qDT+1h~4pR3txf6tFbYd*;DOS2bAIaFau3 zP~1a9aWR&%L^I63>(0?5=OuTjf`UN4o%;O0d?`KyfuucfsA#esOcGIp3-NR6_wj~F zw_F4}Os17S2G{eK6Sd#$u&do7U;>iOqNz{FJ7awO=t+U+pPhz4{s@|KW9d>|0}3+hLAL0%ra`QQCz4Fj~H-)7N7s;yn* zGfSF#Q=tl%|iPr%F4E^Tg}-)`!~>@T8)Mr(OBPrbj2b6B%3%SaIGJsb+_<8JgD zcpd{;%6t&2td|NmG9$8=51cx)yI6*p%(S+kwHGN?d#u*3AK8=<^!?aftbtR;V&vu4 znzaonwf5f?0|Z-+Y+`CHbj{jb;wP!3 zG`p^PWMfOT8oaLic@KH_vh*v?k$l&FWq&&MK6h{|a-RRx#B$PI+r63jhVAwQRE690 zqwjvR97999@Id!f*!lXt0u8L=>}N%Vj%0DnXoCt&7>Y#Q3BIXXCsW?fkV!H^Hbhl4 zv-U^9Y8=c|o%=7;J$$YKcxCGhK)JNNkg3+xr+Nm9lEcR=K`3bt5 zXN&Fogb(KrmHjp=RK>5#K&m*wI4)%qWOfAjg!&BEhN{E#@ZKvg3X1%;J(JbC+?L== z$nxKghZs+Ex-hTgBp>mk`aQcUBV>++W;OjwVIi_*nf$IrENW$Os%TGZtVkG@V1UI_ zgvd%V&!@Rn_fEYRkAge~b_$}B0;}bU_)aQ^J2z}?Dr8))TPIV|zk5Rt$)*6Q;s>q$6KLWtFG)kLlamt*c&tUh z_dYrnAm7&3IS>iLITt`fre!8LvkNy&s!#fAJ%JUCs#w_9T4KO70Iw!Q#I!rH| zH+N~t1ik#Hq}N6%*1ECC7%IJG1;0Zw$Amim?G``8miIKQ)tE-qZhS?pg;c`|VDp3L zXwMdUj6oov4ch5n8qbkDx~n zg-x&;^mSyDCR%}XpbyCulb3^k8kF?k@zAVhf9O8#CW&42aZ89R!sXCnpF254_1*Pc zcHbgwEz`G84&l;CQ1>ZCs`vvAETgmfMCns}NDg&d!{rJ7dXPU4fC=Ztn^zH~XBn=H zwuQ^<6nuo4`hiBD(K$3}bmsdlHoi}oX8XA`8XY8#0ML&-+=jh!c?Q1m!CtM{>>TzHBFWU~!Xc0^!09po zMJ4+q&1-YqOUP?H%Nv^%$7hm#`hLJi$4dng9Y2uclKdiEGAc4V96Aql6V3^} zx4*D}E^^{xn7E!-p@ggePHcl$lkWzfvXt*$TdTn5hKP7sIl2A}*F4S#$z}RVQDDBl z;=b?&Oo}tkUPi`m6$wihxMBDNlZuO}Uoz^>T#kuJ5SENqz41C=X}I8PLQqiuV+uE$ zd`i}EW{8?Vn@;#oGqWlNTH z9DSPv8DxKMT`YI_t(dHKm2LOkI+(a$AbFx>($Ku#BG)bkJ@QTEgoPr z6|9-z5Q+62&oJWnJc8P8HDDT%N~nBU9bk-64UUMfm#FmhQPN?U2vO_!6%~v%v7kLJKD`ubzjCcEL=>j0)3b&)b%7aeIGxZxY?T 
z%-i*0np7Y-V!55-Dzk2j7Nst4`#zLk8co4vJqUg#L*+xA*zJE0F|cEw+;q+fzuKf&DB zfV6(<9%*Xy*V_(&!XUhNOWef)(?0XTXZtt?7JK1i#XZAad1NSSy;nID=ZPco$w;Dg zThqReTW!aQu-4cNg8{l^aPBX zBaBkwTWO6bevJJa9_P1e)kx|$KK_hE=Hlm(|=q@OGdqI*U0YGIR;d#hZE zYBaA8$W2Ki##$s))xCKTT5nc;A1Sfgdq!*0?#m!F66&fVKK-7aMzy+kB|;&cQGWlv z3%pMMD>vEcSEuX2WQa=u$*fCgS2Le!meOf$J--CJ%64OchTqIwNP@gwjMAZqDW*re z-u2*iuqBdK)$S{RFPPZz4rlwJv5gMxw;o(&N=m|m-Pg(cRe@jWI z@WYuy2@A_@zdG}`FdH@73$YmW&KKfU7xT{w7i^b0^8_uvmb*nSOUx% zli`njJnq|cO|^HZeUN{VlIo$Bo2nQq?ljQxg7r5#EWF3k1#Yo>0%Kx)0$gVswt!Q< zNI~eE1HWwOJeFS#6T#4?QD>*Wt0D{xJ&%Dpc;DMmI@?;~(|lgqjdNtm;c08U`Ve!Q z98Sefzd_jk>YyUKby+x1a&IrDXS52td*Fr~D6pA;=2^LO$k@2nulLZ~XjB&!7N5S) zJd8orREFvY=l%0O=wutNPl;)y8u%{V-|5q)<@T8t{QG3C7Q$wW!s1soR%Ijo7XxP$ z3QJ?<cqZZ{=TPk_EeortZxL+c+XA!8kB6F z8CDvRG$6)e?4Gh;-Gjg^cG}Y+canlocypQ0Zq$9@8t+?3;9G!k z6#4eO?JRMa{fG`M?b$#kqX+}r$IsrwU(C(e%^?&dXviu%s~Oc16Pz3(T1vPN-On_t zeTsP^mU6KvuxDmvW*`X;giDG)_U> z?&%aF^y zN%%x(p%G@e!7d5fYu0WQpFOU|zTHRn$qAI1ZQb06O1rB?=g)Zc6`BbTfG`(I8cw8Z zX#ftlkthq|$@3{mMIgpY9T`x;Z3rDF_>bo1Kq;nzP!!dl z<7-Jprf#_%e*S*86NP5t%uaII$v_XkG=ECWMve8#H{CP8 z*fKaf3(h-S(4V?YhWptyxtlFKb`O9h}C zbsm$R%whySTvQO4j+He#UIZa zrRnm;ST`J+8eU4NmB33G&lAB;={0ZUK&XDU&=4taTHB^g{8`@4YAU)6q9??JUv>Ig zBM0TBPcjl+8E;n z@*^{O+b!>Y70W>wDM78`K37$L-QH`!d)gXNfE!sFFUd}K)_OXUx-2tUbUOxLzxg2e zG$oe7{T`T>@D;JVbjzq4a%?a^QPj^zpL!QB^R!)JgvlmPw6vEsOZ@f7Ns^s@Ha>*I zeu}f&dMQa}#qlz*4iwmZsg;v+oGlRK21JcA;0%(kuMMlR$r`@uhZ(=U9(*P_ii{}m z-q>3KerA*dQJW<2k#=S9kx@9)AD?33X4Iwm!pcxF9&Og~;vybdFQ5}IDy>Jq{$M%D z)W&BQ>yNh4JpS?_?(uYKs2>qbgtc6~sGtdp)4itYeOD!ZPk3r4iYA1a zlzYI<#_vjF3+R$IS?O}&5-rHCuTjA1z+7x_dQx<}QMU1KXXl{Q^v0GEJXkQ+1VG8l zHyzk8!~t=+w?-E+gRTB5mgZmvuPpU(FVVnG_X}^X5SCu1C}P7pOUsZF&+^Y}rl<`~ zD4qQ%laM!4{v^5WS8x8dVD^g_{M_Ptyeos2jK)dcOM!={-Atsi5k<@1YkC$T_i8Ha z*2=Sf)v%EqVg6N#0{|pt7ZG>1wQU~ zy>%I@%9#xQ29@wz{H6RtqBz6Oc9Fat8>7K}FH-*YPV48IwxKoe8`yJw#I5sTXCWkc zfkv`^Oww2z?@h=z1`s+zcpq1)Lv~5}WWLl?^u`xd|EJ#6TgSPn9+@RI3Ar*>cE;8` zh25TU1063Fh8!ycvcD^sqSMC;?0jOU z`S#p}4P2y-L8>mX^BxPlJd8Gwe^tB9_PyN_LFb?OY`zfDyqBK$s{l6SEZSe^MwIqDd=Ps1 zE?^2)^MAJ9btaukn9Sr@3%&@Lc(pY2$q>0k|$p!&P|=$6p{aUM@~Sc;jnI; zkH|cXN=`pm>_SM*%12!psft`(mOHcDwUNTOz23zw!I17{Atq=!i$G2ym zHnGW=*pq(7h*7Z-^PNK}ArS9OksiJ~O|>W*&$78!=?W6~BK=w_ggu-Dr`&J^h?Hpn zCO>F12uc$0j2OL};bJ3x-r<=5hg4+Y-lV}RkC_Yu7L2QRA!-FboejieHhXUnFZ=8k z(Xngg7UboXTrlp@pH^^yhp!ldneC`W0bbdoQDL%|g?=K-qE0%_uLH#T;?qs^v?@<& z*Cxy}N#1h2cQVPF+b@T@&DqL9_=8%T8d4ZAix_ho-1bs!``y}yMpiCifFd|Fw$@}p zUtAvd+PhVE*m2ivym`EY*|Vhqc!-!$j4dYuCc>R-ZIAzMIm+a%Z-|^0GV6-jiy`F7}?R3#Sc>Rm8tCVWh_i>yUm!7QOr0uo- zQEl@YVbgdzyxybbnnu75V9_<&KyVgF|HWyJS^RP~OEaz9@;zl$(}Nakxr65Upc*qC z^R&~i4HO1mEJ{z`{ao*r$_q(zq&hs+Cy@JCQwZcbMD^i)eV%CZI?8FN?6fa#pQ5*D zQZvzF(4-P&_v2t+G{gLVl_n1JmB!&+km+k*HJcaeXyOj6|xLO zd(&&ijnP*A)NbXQK&UQ{xPkK+$VB6!UMeda))5cY;N`pY1lyqp)bvm0A4qj|_2NUf z1i*=vOGwWZ%3W*gLut65+9QpIiyy$Jg=ZPB7%mHvTIK_?-?m)pD$sgzhrN#25+|3i z`7^0()YsqF{=0$;dR^Wqj42I7&Lcaw`1nhgQe6-z`T*l>*SG!!s+rGxs}dOiw>G=% zZENFLJGFmR`r0Z zZsS#ulKIKM0Thr*oQrNTE|tNIjH)W{mldD(+usooOEY6pI{s}4hfG3hE)PoRPS(rn zd3TsUGq%>&w(SX`=FgBT@7`vDR3KC~4_j|sW7xYma;>3o#1v%$!R@en*oNplS4*&v zB#W|+D^HF)4$4Gm^ktoai61;^V%O+TLdV7UJJ~)?n?Tt6VZ2r-Rs=(<`K$P2t{Z9W zZjy=Z)yE;)m;9;5g$Z*k#KU~G0c9%e`LR$gx-|<>rOPBEc5Uph(%g$Dva12|m-BCl z4WeCb>%H|0CNk!qT%QC_O^7qbK^cAhs+K3cp(tgvY#soJ~&gK8q_bf|H4BB=?$FE^EE1~kG zV--7zECJio9CVYp71W#%F5bo-BrfS-qFE~bhr5%({W&H+n6<;6=hO3Iu0D8s0(AtL zsEL#IAfkBE#R=J39VrxYKJIK|Y)qVm0BKME;7X4<7~T%wU2Bb0Sv7^jYM?l)x??5L z6?V-ay0h=eiWFWLSBj5$tLBu|uTgi|4G#iTl4&>|(QG86G_Vt#v6;`>qLS*U}xv24VLu&d~%#_Jsr#Rn?&+kCNDSJ?Puvwvmk@` zdua>s;8-7iXl`$rzP53mA_69UZqsO9qGO;88q-X-R0N__I)Vbg)*!=@@ue|0;@DU@ 
zuVxR44mV+DH7;vWrJ0jsG*(O#3k|ry`X{$KLUUlP%F^^;$QotbproRib&HF%t@{rM z;lycad)6nC#hf406BdUH+Dej3#OFR&tjO64cz^iV4R$6j7#9Js<%n^lW>-ITb~O8d zU9@S;*+6>9Xz0=j)XJEw2`Yy_HCQM)#n6AA!P1`pCqJ^YW}@xGM_c`Tq6}b9!WR0f z{|05R6AAJ*sjJ*dY>0%V4|7wrgt25JwHjRjB>->R3L>Q6ug$3u$9PreM91YIBdH3I z1It%OS{u}pX@IT)B+~v$PJ;X{eXRN%ahtHy+w|6N;qQGpez=n_h%R%qn6uBAlYSZ! z|F$SBMcd$ubP-0M)?j++25NwIT^hdofPVaCNLHW2w$N1v3|T{A|_Xh8M-)g z8E~s{fFKdtB<#4gakMF_vaeYK-)86#UA=ERvd>{6 zSD~l0B;cY&pL>wF1OB=1?!GaH04%k*s}GvFfgX~CrzVF3w>m)rbKh@7+DxkO{^xzq zBD20Gxv>rqzGny3Q5NYAq7b34Sy z%J^Je(Nd3f<0wNH91?^;*NR;F1fN!F@gt1LquYP$h88oJM24eSILN!fLE8&yJ%=^@ zZZ9w{BQ>i*$&|xavIKC=;PZzN?FMR?0UzE`jn#TdI$j@Ba~RU7)1@P)Nnd->Zpj0FHiTD+7HP#hxnNTI_Nh zlIC`lUGG#^^7rfp)dB_mi>EQO0RMt$9KHg|>aS1EZ4z}d z|D^fB3v(jka{~gVBS+~n2bJHQc33^rHQ;p(^3t5j6+xDg!CC32cK07Ym%9gngg*!T zGxhTnLe-c5&M$%fPGI$)wX780A^EW?x;VKqH`pAp-t2ziB6v+;%{yu#1~{|9nTj+m zWMD36_Y1T~zXSm`gZin0SVJK5AtRzi240#0`RER_swd0d?Dvj}Cm?z1z(D3hUPEzl zd!v;C6#3vIb;%3;ckKj~AOoMk^Z)P5|15-;=v7e4@oD$x6@a_q_Du~n$x`9e`VmE_ z61CRnDdi_Cbxd#;RO#hq{#%})7UgA%?SSBu6m|xR8p+2u<*hCCl70}62X6fYn=9(z zZXDgy`0soEuZaGrhw(P8MmpQ{7`9WON!qzz+(hkV-fhvHg``z5m}FhP^PfK$JIDlX z{g*0d9FPB#Bw$W!aAmlZgiYu<-xq z3vrYV(rHkoV5qFB`mahKNI%<1HHPgz>iV&lDP`6#+M*)=W4IvpO#hW4AjjiADa93d zK8NYK|9ql@VdWN4O6qLj*2A8_^A0k=)dQ7a^CIU+9JM}>_P^Uh{M+RKS1~5GsvQ@n zP+B74OmzZ~K)ov__A+la2h@&#Gkp=u-DS#BN!3xvH|~<7mO&tlivKE-`rm!|)2{)k zJM+GktGDZGCG|E)nx^O}pMI}@qLPvz>$WVnA}U^Y)>2giFp!Y{ipZ!An0oQC|4&~) z-IOA9d)WxTI8)MCu{m4$I|iZsQs|LKMKX(#!xl`P#`93+i8gi(-esSdANc36to-#$tnkPzT9n1rv_6e?degW5jK60TV?%xyUuLYsclNkzBkZ+0Bb zk(MfSw?q$PN_#c=6hw^b$@e$U|D!)`Ei_Q*c?Pr6tuY}VKu4(dB)F|kVww!=7nX2K z`p2(NJuaLGSd*W%wi#VI9iEaW0riU=t+z0K_am{g*JvxHa^UL|n859O4?#7!t6%Nx zo_|xszT3okHHBDx>%Z_3?}Kg;F9f z>uY~T)58MB42gSY6vMJXw*DMFEfDxnbwpPVcjDV~zW%P%hD;O5@Scd&+S&C#u&8tA zH!<)-e)w+NWxw5;i>A910@9wF?=EM0hNCCp-`DHS%2zY&=oMgVAg${PVq(Mq^p zJ{s%q!+$F){rDS}g5qLDrhLA(C+d;eKVz0fOGW#(+vB%l6J9;&WB2VDNgw(Glz2HJ zwZ1eKesJ7lsL!RX57&~uticoQoUoTf$2N?5r$pjbb@c})D>8102ec(?hExlNDkMAX zPHbPh_ds%jl5|{&nQ4^4L{K{RspKW`U?RR(EA=Wq`$20Is`U?pBJ{A% z@Gie|4kcHXzICETOE$Ee_NR@K@A)k}4T5G0zCX3^*jX0DJ2x~&sA3Kh&(~LAH=n2> zL^5utG8y4g4n)WYY_!+hO|nS^1%;pVohH|VQDe6YLvj?zFkc^EkjU$BG;A#X;C{)- zmzq$5Z<#bYhN!#Lszjn>3=23U%S`k*{LHl`1M=P6OK++ouOiaZ88r+;E0P_=0Jwob zZsm8194j_HM;O%eP8Q#F8P5++Nugvq`~Fc_8nbzvuUFh*?TuL;REf@+K+j)2D71^H z&0EhA>NHpox8KXqqCNYm6CPx{{yyFyAAQwHW;p{H>YXO?t9_vIygAD2Q~1a6%R&x1 zk*G17-KK0h@^VxdGN9)*)XZu2*B~3RHWZQB+#$9%__d-GllK_b;h#QwhiYkxzDuu; z|A`lw7^qK9r1b=LJor{5z&O8SdF;0}EvuYPm+-DCBMhGS#MzUK!p+QDv*e^Cj*WTD zI?)>-yF$^`8n18AI?WOU7zO0LTrm3{`kNb;L2i9DGk)_K^!6~U~d z-sysk%`m6ky>XU0a~Vmp!7Mln6ZmXSProZk2_6p=5O*elSImh9g13)Ny_VRqmhfu% zL=9r+xHyfA;bjZyaae<(zTu++l&tOcLvaU#y8C675ve@ph)R?rC}sc-*Z`@eVr7%f z(pDcp)~wcY)~uK+e^)SCJ15D_F6SuVy(Vg$>_=Z1rAV1;7s8-?YP5G|-}*%(oIRh5wB6gGSuX zq|s`VJRp}vX|iHsgFn?hIQIPpNPezWIqyApBD?3)H9HzPcP@?-oN~`Uy{)O#0=-dI z?WVBPUcdKEo88}PAcu@wzGEB1Ea_k;GhjiC!${X6;yH011(9SbhEB_pPJ{p@#rU;W z2Gx+cn)6-D;xJcq_9Vy~$a)|D3f<_=iIV!_>#)UqJ~b^3os~5wQ_QaV*Riqw{2@h+ zs;LyWuGbMGB|54t(@wKH;i)c9nPH)+S`W%F{cg$XBR8_ zXAU{IZ2O^hr}V~&cyHDTd!P#;=f^nOs!UD&E5TOrM`ju-!h zYBY6h;vqjDx@cSb6{I`&XC>P6C2Yb8ouLWDF$FcYgZL&>{VeK~efY6yw!*AolUK=i zm)ppQuep?_VaERJlbpY#L$Z6=4&L#s%5ws6AIn&Ja{eqj|$Eaim`7ly5a1vB8Sqh8tRAPgxp8_=GhH ztI{?m7WgiQQ-|{AVt0Kqzz=yFT*o@r*zJ2NLT~a3aqHx_hBV!ICmxB=Zq!9@o&QI; za?+c!75Gsw?l0*}$45t;N!X9XHqi6vmTAx3Z_bTr(tAI3;3RO&;pM)+QE-KK7yNyVaPIdh~|h!ZiKxVaZnjQydLaUde?DQzEQY0 zuifPq(dQn$hzl)~j0q(PtF6b8yVn^}?@ z2UVs;sLoEvueQgcF^6yXC+<+5%7Bfd;a1^uqF;aI~$@5FGK$?%@-Ei{k; z_UPC*6Tv=a_MO4A1*%n_4xpj|*l_-So*x}Es=?^M78#1eDvh5B7u-t7bZzdsQ5xnR 
zmnKFN`WfJ|)x4Jd?Y$G=#d!)#D>Q+ESXv1D@Pq?jR4K!iE7#gW&IY&JnuzmW4j-VDzWi1ri zP}$lN!DP)&?VF-TUi8^O&QRs|r}l!RwY1QtA9EbTRtDdqNvkSN4_yjN$kA#vKi{&v zml8jFsV8IC_Iv+KH2<$I_t850(8JBZmEWK4nx+_TZb@SO^IZFG*}UUc4a#H_ihGc( zCcRefPjzfBH9YSg`(rK9-tJt6T`Bw+;UhCtv(3cs9zr0;{WxB?-5r*oK)T^%hJD~$ zJuX_Y`Y1qlt{wZsan11_Qcge)l{ARq0_F^0tzmlu<)@*I5P$EaKtMnEi%A?Qr248- zsF5-TpONPU?XT5P`PvZc_79RZAiJkd3M~3;Ft5Z&OLOlREJoatTaA$@JXkCo`Y3pn zEiAOBr|Q}v=0$o|pH{j*eVw)?Iw7aI!!!=(ZEn7Cy}L4LdddDxL~048c8oQ50&Q6J zm486C;paRa+B$RJYHt_E{j5OB|0pb58EEB(L|qvjuBs21+@Zx>QQiB!#-2bP7N{XD zN)8lQ63^jmY;u;nVD&wEl^XrM?>XXdgB|(@t0l&5sYT{cWv@eQ^}q;6n0s@Fe$wsY zt5V*2@+3SuF>x!7g{l}}%)Cx3hVDSiH@dhh+*a0XXXodxb-JO|{pi>;Q{J}RTJ)*) za@Cw!1!ZRgJL}FkO!=dS+kjr~`_F1I z`)x2_t{F5ji#z9>=Xsh5{y@K?x7=i@v$`D1!2m!Z_YE}^Amy0E~BpL1ZQ0+RMU z!rChjjs}S5${oe%oR0poEkr+oD9^1Il0Q50)o!$mfY(LS+IRoj@!Ze9ncMs|CBcz2 z3Lr=WWS&jR7Dyd}0E&`JxO!0A`|rXH zp}o07W~|DO$YJK?_KQ4{lF-{zrXW!Jt&P}R_^yi!mf+J2%t}==^t1oi_dGNLXGyBD z@VT7J?rQ{lrz}pj_SwkISeMrV7IJsSeZj^>#C{DK@ECt53H{`UAHi@2Tjt(KNGAq>>h$|{SWyJR=! zeK$`>yB$XqDXcf@KK=;eZr_Y>c^g5{8EO3(o3@Hu(^)Ct7KMQ*kTbTNHswl;vj z*^JhjJxGv%3E6!se#y+sD@Q)ykgua2&+G&(dpel$F=8onoDC&}VBi(0g})jo8TzJ+ zt5_RrU%%;t8)X=vuepL7M2W@VJk}OtNd1zLPBKiI$-^~XRv<~%N#1h7Rafe*Q62d= z%)p2qL@mJ$uh-W3ySrH-05f0``mx-4g!8cOd5Nj#Qo3f{X+A+t-Pbq5^o@u`uooUVm#TY9_nCFQOe0+zv#<{G%+41jm2HxHOwU)<$8)c<&ij-z`& zR$&nz8%ooWR) zYPzARMg70jbM*ysQr_z@y z8*JnZnNM=mg`JonV^bWm8zcK;Qp14~f1yv&g`p(Nt5yT3lC)7B~> zj9TB}gG(u?-lzT1pW(!4CUw0ky^%=cz?@yU?O`_3*oT{PsH=n15@|6qR_S8o``?$O zY-1$IzWeTNkL9unAWHhqXg;?NZ`b`Ck8B7yY&Mj@`rCGYSt1_s1KD?I{paI!Id~bB zeG6_C`21P+dR9IbP9uecPD9FacD!C)n43ChMnd!ej@W`{&&Fh*t;4`>T9o4vxP;_Rq%+G%TWW@g=c1&*jWu*k0CX zNxEh>#o6m!YWou@qe!lcch&KQt~o|AJC zpl9~4&XK=|HHA~L<~_u#QgM}Fy{>?7Pp_1iE3XC!6PPRC?zzbr@1NN8lRS|o2H^fV zpilOuxX7e{wmVX|Lhd4UnXm0cPWmce|>1!uc46D0lep;HAQ`q^Ke~!lW6@NXl`tMHg3pW9>r^$AMB&d@! zoh1hx>&Y?w50c?Ar2q2xnWt&M$&9v9-#<>Gpmbrt_)J&=L{fw= z0cg(%?dY4|e27`jIL0MX65FT6y%;T-iqwVO!AUGs;uR`l>);0-BOOEonMX%Wq(2+z zH?!}b8X51s!QWq;{fq}w+4~Yu1)MgYTa_yDpwjZ1A7D##}3srWv!9R{SIkC zFgJ~`=6^rF>(dvXWQ(l^Kr?mtZVFrF#5S;lGcNjND87nkEWL)8ek3X{j>>Sjn--AU|Qd)94rTKP^x z6BTZNtu?Pjd;}9MAZY<_I`49*jv#M_p*kJChS;=fbN)P<(u_Dl5?Na_*A$JWSEqAH zww z$KV)SJs3q%9zj(CO!$9$8s@)}#UURvJl+|N8xZw+j7#U6=mv{}ba&lg8J`S-Jbho$1J3Lh?@Q1!eypYSV<=G<%^YOnPwC rT5Uw}5y1aH=Uo(!V&wn%nsr2njZT0*T;e75F{;X%4~rf=d-Fd4>iS5d literal 94240 zcmd?Rbx>SQ`z<74>u{TWx^AdnS??GQhg_K>>j#r!)o8R8xeE?St4BkkjP{bv#90!32UA^}%(?*Zy!Lq^`jW=y-vuBM{ zCg>Rd9I;ceYbQwW$^JdQBcF)=d&q`mr-%G=^nYcldH3(hT%UuS|8wT2yqas_AHkSx zerzu18vmS_rr~J+0dq18d~J+A;nibqR*q59IbX*A-@jixf5HcD;3!e)!TX=P#~GM? 
z`uB?VivQDltJ8@}{PRRNf8N_*doE-7ITbF5e>7Q6C-aaDM0OJHVNpwur1`PniA&!v z#2raA13m5MBqq*6uQRunN0Z}$Aj0!{uL&;`VKiOdNf}P1>1DdY+rzbW#=T~{J(#pCcz4UsCg98nWah6D}7 zV^{i=sg3Z)C!zUL8gjLXIXb(;{Dj|KXAJ#46iJ?UBpWI_t?=SgLpI+DrAJxWIB~w< znZ!-DTNkEhCm!o65=mynJ8Ax)LyQBjmY6zMHdVS~V4v5LJ0nu8c`8=d?8)@R zS?}Kvwl{#*blW^p5IU=vG{@g47Y}IHIIqQ|K{HX+Ftbrt3U=vxo2yzOe(r>p{svmZ z-9S)EPE>4ftJ??DmcFcG<6;{(Wqp^d?}Y@uD~4U?XK8w^ADgRnmVmg+dGC@>kKx*# zR%+jLff3%+034bNL_8_;YHV03UvO=gWPfQHdg#V<9k|vpYHIv|7+oJ68jPVljoiQ- zwmm+9nArrCaQJ}NV}+3ow@Hblu&nQi{6c;*oDM!th`et6%*D0YW)z9cvK=)yY57>V zgml&A)4PrG-Ul?8Tp261fjKs6{3cdMk;EWt*PL=veocF59=W~w7Jd-ka__q~Fvr$m zr7CB!L#iWHUT_Bb;ApK9|yJ7{IGcd^=uoz zPe+yrJwoPWhEv$x0Oof!?Q}ZkF@4Y}mL6TX@qdljeSY?`W`;kJXtf0Mr!^ZVEoVm; z7^=`7>XGC%w}z5VmzIa%vqoh}e4I8N=Hxa-L`yzxnQMr_5M7$A-5Y5g6Z+Ozu* z!BpSYlGlHq=>FhrqpQAczy9#4s}%OSYFZMX1ib0*#-LJv?>1oEVbKi@8Trjv44f~H zp&{+Vt5rqgeY9A1do=SUHPhq!hPU~!XB7eE^?9wu!{p+u)8CfFqOH(2@1MS{tfd-J zXe7v;1yQRzBx-pv8`Tmbki#>%%ULcNTT+9n*Xe>{RnL3+1?_!b4d2rBd?wQ>T~k-h2$n)*d9xgFkb|S zycsspn_V>e^6L!<)WXY(zU9G*lN3qG!msn|{UMny%{PB_xgZ{1Yi)X(wkSC6U!(<6 z7D`LlGDQ|At2&g>s|45q{enwqNvodk;r%tjNh9zgOtc!a`)Yp+;|+OcQ3oPtMj~M# zt2Cs~^Zm2z^dq$LvICXVyq%iY=IYG^y({h7)*o)^JFY#+8WXQN&qZTL!F34Jpd-dBQr9G zeYhya-0Vb><=@#;{x4DczOODW@W zI~z6pr?Q=mO{Z3oOe7FdYg#Z2&y?+<27iP}mOdLU@9Gj-BzZu#4iIOx8@{f)eRu;~ zmo^8!6UO{|WV!g&bCg@B>E9FdVSzN4fxm9k=<;K8Gn@ehwMWyZhsX0N>OaZXp1E_C znl&7&PFh!b+;Eq`4v+7h<_}$ty!AjKs01gGC3pSaqL15$H2He7@4OyG>E^j$C?j6L z1fzXxMCIO}g`!VYDfM6Ov>wkK*@TK)GJ`R`+wJ*@NT@mQKtZ!VeWm_OAYijJ>x<6FWuL>UoZhlCnj z8=G#^qa@I?-ZSIbc|Y>0s)CIgNkqd#|F00F;cVliRi01qwlQ_M;}=)uy#3~`h=1Cf zce^%Hf3KAmZXG0@ZhsNvmZ=V&J$8Uv&ZJ7><%EXyShuS4I4aCsnzD9Y#N9FKBzD$= z^SdXqGb$uR^I@&w&s8_Xo6b7p6+d)`=Xsm_*c`j~t7{-;2xhs6xV0QWDZpYD>OTdR zYF+wv-^;Wm54nneflzE*At{*5+<46(H|*xmR@bZw2QHV!64z)0FF#lk+2)i&a~~aK8w{n$68Oq^pxPgtgscAE&&NuTGL&mW)(uL)N^9QN)VHg5-`0@cr*O~pur|3&*dIsgLf>*s`U&@Vep`h=WA>N8o6NivK+f( z{Kj5;8i%p9grFQ|sKJR~gt?!VYAtJClyC#;n`3zj0auk{igahj$s-+qk2c{jH&7!g z+p!NCVJAca@Ac_Xqa_$VsKB3f1Zr6dP!LyH6%udy#W16RhE3M%M>d05^Q^dIDT)Lc z;-It%7f9WQnn^d6!fp3+T*KO`R(7GXGz}r9=Y4g`seN)I?CX{#XKiK%TRO$GE~Ori zWg|3pW~T|7{IVlWk&KSRMick9Ni^R$UGC{fbtR2TVey{N;(nHE^Lo4~#u;neahNWf zd+RSDS9O0M$kJEoK5)82BC(YdsCpFEF>W4GqE6~Wz#goq&eh&ALix_b8v+}KX!Cq_ zNyLaKqv>S7^}-y7Q9OtX_RNWf@7bjW7@S)<+RNs)-B;UXbgw_e`?nTV@1+snR+ zrP8aL*;2h34|j}2LdP01#ha%X*b!svc*um;3MUXkP)o?|xZLjZ0E+;bNZ9Gi@FHAo zgJQo%zrWr})O$XyG+r9@8n1j13?B74ocFf;c88l21r{5gK}xtn48-L3_BC59BIkA_ z{Nb#9-zj$V?E^v4VpTP>?Up;MBIdg+J3HTUA4cJB@@(amWC72RkfNMREj#pDm4!u> zoLdcJ>;jsl6+F;&+Q?@Y#2Nr?qUsKdJh*qothSZWIIB(0+s5H@dQ+garnQ<{T7fdBL{l}n9)c8(Au3gQrBHY9;Jh^ z=5RH*jAoB7y43R#muzvc#yRirA1nkCM6tKa&+VpQ4Ko2kr}?sR^^B+$ILV;vn&}=% zuC`xw=?~vfPUg0NWyFo=hQNTO=9BUnFPKDUf6@bw#OvJY?c2bWL5j`kQ0Q;Zb8>!8 z_y?a+gRhnPuGi{Li@pv;{mg#4426!jA6-tP4HkZ=yOM%D?Uj?0%kU-)=}}ejT57P` zz{2vK;bo+qGdWsc%Y3RdT_Q;1a_g@unP^yF;P|uPWmTN`XpL~9v+C^}eBF>VbbH=v zis9r~zI`N~3pEf5nQt+{U@upHYqJL8z282o<-!7eL1hksJ@9YDMN^7cn zmN8wrw#s;|vowOLdYfi)h%Abh7|gQ9%w)C-9kF6H$~Py*cStVz-e~ z9)9Oj>i15oRqFqJ<9WHFTYq)`rQedBocb=#Sym&$Y_$GXk!Ud1#FG$5Zk4)?_&ze`iGbWc#*r^$UNDm&Wn{=-!T6 z1FsGMvXnO%QB3qytQ+GLb{rdFRNA~HqABTt^@<;x${r+%PF0Qe!prk4$LU6vWPC$g zAJ61XZ5qA=sGDK{+uctlfAxCbZJgbp`WXGcXh-&V_v)bTrtRHRQo>FwN8Q-9LsWu4NddCbTI3jv$cd-fajQikZR&d=yH zn`MF5CZ#+gs&!4hK4pD6f1_kxG!jzl55n9dHNIFADpY4|yT5eE^Rn7WHqKT0?7?ca zI`&raIu38!Lz}u@E$)!iiyoo!DHQ9xYp!QdXjO#{t~(WYRG79|maJ(js6Y?pFtCFR zBZv~OAo*wC1}l}1_3fSVBIHR1f}g~>J-tlg5O_FBL7@H}#OabLblkCY(d;&mM}4_i zxo_?Xzv|HS$_e7#%W%3iJ}EIeQBcN{M0~TH6U~i8c->0Y#FuD+(8#uc6-`z31Wl)X z-^HsGNo&nq*-pMZy>yknOwT)u$eIgpUl z0j8Xf32W`S4OM4#(~qk#Q|Z%o%MsE~n& 
z#{WLh;Cw)P4gMAk%sF*%=S6<>Mr6MDdVSu*`#7Fcfux(T@29+h+L~$~82TWU7E$9z z7m(|VTsvTm9^y{cin@=~Agj?p!NS_!I=`dRif%Pu@Sd&VkdMg>uZ(VY*DQ|=DJV*A zq{Ig{Cnlf2d`g5C;MYdUq-*pAKvNb2bFOz1k`5_Gh`W9lxE;vnxmgwVhlBec{j9P( z+YFDwdzRwUDUVe@4~~k!eLHDd%lU(^UWP)P8)-(2ugn<%!BR!Zxjp5E!1xx8+h^K9uj|%?jqv5DHa2Jfiaw z=Vfz$iqQMdH0}`R#Wb`=7+ZG_w!r0)h$`1L<18*Y9!9dKyGymz0dXT_X0ujHO5%E{ z9vnrArHB%zdtb49GO36OlkIi9twudv^j#*O#NX1{p`3O^!$s&+$k5U{F2xgC!25~4 zu0$I-t6|0pn5`2_{!aI8ZqB z(;v?OI1(?_v-KhkBjTEaX+rW|{>YpB2r?n({6!Mel}gLaj#ltktMwhJtL-M% z+hB3!+xyaFwi6T~y;BZHS{=_l!inyfv%}hn1~E8f5Xac$6OtmvabbzURvS@VhasY+hnD2ZYV|(8r zg-58xcI||aP~=C?rt_lu%SsF;3~47`<W`mJMKRnn73;l zjJIXM&RGqcPjEh;^sX0;A`XuIV^3PwKS9QY3S%}#;QZv}jWDqgex>Bd(leCB%Ex|& zV)^K<{$)lvM$vgu5Y#`+rmCR__uE6==vU+3Z*cUqrT+{U*eW!vY@QKXqQc|Rt7lFC z-p1>iW|)$tu1xn$maFv9b>t9}>$GTaRIOLH{m@JO(IoCk(`9$Wi||8st7c4U9E=)A zUvuKJ1>zOVEjTU_<|rfS9783C&r^L>$skdqS9y{cB0CTeCWrnxp zrJe&3@qD~W`$n>rzN*Z)84Qmaj#VmPXNJo>X{F{-t2RB+3S{Y6<3`Oj3|E&$q6vTe zWa%Ivo*R9o3O{2_1F!s)ogOC_W0#3z{8H*qM|CDYA=*IU!CYQe&JWsCkioF zkhc8_R`ZD#krmn*S^b$nhsvq%eH|%MMyX(>T{QGq8iI@^hfg`9{je46FP6E`=FsSB z=*>HMe7x$#L19dA_H_7hSSR&$uf|xJNSdOGE1>LZ<%&K_I#BiY{F3P4MipQV-pvuA zr4?fKAzjp#%lazwL3=dMF3~W{9};zW%rD+NZ3rO5yVJ!7%4prZw&E*88^bp>ZqJ2o zBW8oWUF?J%=ky0Ms^0`;FGrKQ$_0HHRu8yau zESR<+TcT{g{IL3Bp0O`rAre8onQyaJ{pceokEiosn=q1r&n_uZNwc~odo#BnHJ2#xX>{}Xn!JA4jj`K_9sr*TeU8ZPPVl3li*o5*R1wOFyn=Hd0HA>Oq9&y1ne ze;=A?6#6>V;lNly*AMcxW&v5n<_5wgA`h9IErbV!UqN9OaK9pstmpE|jR`ej3c#O^ zUY?XHcn<<^N-RImHtwNMe;*&N@?<|`EYs$SFL&J{?#`(xrQ3rxlo)cKj`kSs#97&iM*JEvoADFp9aMSwG{aN41McUot!#+0 z<1Jwlxpl;@%r;TcLW@p=84$v^XHju<`=XS%;@QD3*6MW((CyY(H%Z}CGu@q*u-Cfi zxG3IP{)t95k>|%ei8P0!;FCXsefM=QD>u`h$7Kv;ulY<*7|gUX`UO!^zVP;NIVyNY zwPN0Eh@=w}GJEDT)~x08w%9yF$0UR2TSAPRdlMsu=!qB6PM3m?OTtE;T+6?NGzTg5 z*+iX`YL|y@Upmb88!;ohuQPQvMbexgv2raWA0>)IPUu6XmfEMG@*kWLp)6I_y9#;iP`5;3iwzW~4l>r(au zSyN^_>M^Kd*Ov0f=G~ZF-}cs79+p z<8N7JPs#Idv?RxuyuK>>wgkwM_aXU1iyp}VEVIVX3T%}_&br-Jm#M)C#^q<#xuvs` zw;HuXe5nZnS2k`JxzHT|xu4?%&}=R2y#2f0_Q?&C?&4(_&0`_Zr@ubSf+zQn8WE7TX{`{53Av-ZXE*^owMgSR~477;6PgPfGx6uRoGI39P%4!v+#%}ivk(O7X` z`w3^)4DWa!cc6~0$cnzrk`1YFYA~sQH8r5gw{U9{MXIh1I|LL7fik(?KEuGjDIOdb zK%4!}X4*sj%uj$mB;_ZHP_6CJ&Nk)x_TFaVC^o6-7Pd~@7|9*u){P1>W2oQOTj53sY_mghvyR`t|vb~7Mp)h>Ba1q+WCiu&soa-C8|{nW16{sj4kQ7 zv!o+k^)#3mnUtt|%=qIH1}Rk*pff$C9{GL01qkp1?`OP~ughV4E>cAz zSGa!!NTObeJV=IPX9WnimgqDJRX87ZGvQIpW0KYNhMnc4QB3_MxrPYUIr}WQi?VC(L0X zQfL4Or1U5(eA#NP;2hc*n2w?R zHgPx!fx+N!mu@yge;*JlcF&hg(^m7#8%I>d?j!dF+T{rJ89WG}{=qNZQt$3JBIUeA zr&QCe?{1Z^et`!jD!yw5Upw7giUYfegbh+P`i#$}Xk#>WVGkdMX-yZE8eydy~EfE(_d&*RwgOvm>cBA_f?Bs4$>iK6>dRY^xp2+(x>46GlzbR!jrTg8Of z@;d@njGMeuM>_Jut@Xg+7&+g!dL6ID0m^zFCq8oim+(HTJ!|-MwwMgDB!5t7t0zRN z(E2vt@EkH$#>zYWSHiXgKCyjYeG9*Z$*qO6ZmIb-XKrqeT7SdVYHS2`CrXvB@ zt1|HE;ZA>SL2LO7gCD>HYZEqpU3K7?3_n|*9Ct4>ti=N@zi`{=IkeD$X-&E+WT(;Mu8+Mjk{ zB=J0^U^Sa;QXF;2cRb)L=~`1zuRY?R@yYf0;%<(~JZW8*7NAysa>mp}E){rzwfLl`=Ee01u)HrSOf)TC5%80Y|b zr~V}*F^!-0bN7!@g@yeE(fJkLVYx^S@h>hIW1ktn$|{(gECX?oV{r`d&)Nh{`U4q` z&G~E&4zXM56P*e$7!7Q-eQvt5@#EpX?9@2~ua z2g8d{OD&let{VNtD$!|FJB+J>GkcYFU(~w3Ab@p-Y?j+$mwjG8D(i`1&tR|r%$x$5 zCGApusVbv9gzP@Ldh&dhzGTc5lSwz54Vr9O!TOIj!{rRY?ilw@)#mhg!^2L`;M4sw zT`*N3>~pqgaWgvu?40HjjPoQT+eG>nYs)+31GF`5Ms;^pCTR>?mUAU3&zO+ z=7ET=?%&J4?vKSkG^;NhQFStL_s-JRPaoZgyO%t&^CH)k$v#j07+}ezXpTvIl^THA zELqo|;B;wEqLWG(jcqem!_&a9oE|k76y681n6VGIE5uOxqtVC)NIASuBFOw+A4RKT zGUO(X-H2IO*^T`S~i%MbA zUMI&Z@j1tex5ab3;ya-z@e&+pzE1zZi*V7Q$_A%?yiwJ2)IB}*EY$X3OGAmf#^dhR z@U~lSqx^+rb~yP8q3zDeDX=`h28z75Q-BIfx8(s@F)pz43(-pzOEs zSk1{=TArwPTDG<99RgO+oyxjAoS&noDz>B5K|enBY{YR;e&D24WK{01o%YH|J^D8H z6&PFxNAj(zov;b-i 
zZ<25Mvy7;F)D@0ORzvY+?wU?_;yYonsrL9U1I(|Q8|_jiY8*m?rw9XAUfM5!QWte0Uz2&AgFdys5dr8%lN?2SlC1LF1uBp!83*5>8?RogYEtO3x8Og1|hC0ds>36D%tti)R8=P1L;cL+F-=WMVMM8Qn&bV?>+ zj=s3BD^5 zRcYX>vA=bv%2o;Yw*q`UD+G;(-Awvf;a}-{#^o3c%`_QUBd5EDRD2_}$VdBd1q(qD zssz2S+A&zNZ?YBN4DgFMJ85Gam-^)wzd7I%@5#e47+W9$#-*O8#B>**={}ss&xyhz z{H2Ns2B5=p)s$RV%d`EG(gN+sSu-4+*U}59^NAV&(@mWjfo(&av-dB|J*1b^PjPV0|9(*EkL6UBl z8M7e}8zua|LKy|vxH+i6P8i#Heu@fO?duwfA6@|sq}UBr<_0$%c!1sBjp{r}$D-4) zpqZMphtub;fLyle#P{kViYL6*zDanu@Cuwzx@z~8*RW?EXU^=@$bhZIwlws0=Gfot z@J1@uWsP*NQjH*B`Kg3_!vP#95)bZW@aKvjXvRYJw}mbFGXlf0Z%cePGd}9l$@P=~ z>#JrTHl4-6`e`=pb7MaX8O78OL$wRrZAB7hdn8eQL{jy(EC# z^(7>_&O#Hn6m=`p`(17yA6oKq8~I;&yv5l9E3N{K6I zScou^&j_Dayp|P=Z&av=Un&7~*;`;YrVl^fIImKfGb4k12%M?sUIo*-%DSxAS>{BSj^yB4-zH72KPJ7^j^yHoO4{h?Ef=$wneG?IG+;eoQiR zzH;1MGQ<;5OfknsdHVNTi~RjOd;AC!qK24oXiSG6YTMuHsJBda@GAi)M#4$+mHCPZ zO9g=LuQOsdKRmAA zVCUO3#Z+H@rJhUuydkecni7?2W_@Zi@Agi!06RVqY1j)44to2RH*kpe{1= z?6vVqJVyJLZn`z&J?(nz#qOdRQPC0ikTxw|mz!&pq0^yp@GjW+;7>-P=Cc6e?5`8| zW3Ybfmk>HM-gDZ7rz`R*=1JT8TPULCRs<{VtGX8OW{#NmYUxvM@oaA{hKBHJN-=LFlZn6MzN^i-Z+X;+MP z&l~<9n{ES|F2}}=q^$H!ny4pvwPs5wbc@a+0Ci0J@*q*7&8~|+oD+4@-B&Ktu_r)V zJkIxZR3!f$PQd3S?dOS4I{t0%(RkROkY14rAo>+(w|`IevR?Hlv&Jrm5)yS}7%vLX zU#1?3g^Wvv{vb)$s-*80b-ZB$T30kz5}k0;gfv@gIY<29Qq~7>rXZ}~-=#+hS$jVe zP>Guh1aXS*?Mcf8OfG&NQtrJ_?g^j+nV}R$9+4>pGab6f?2pJK)v! z3hie?KDlYg*G&z}MRTkf&U?D3Kg~A0@wJ^;6p(rC08#B4Vt`FPf1t0KvzqTZ?|2I; z)8SUi9``<(TD`HZ6%gGvS zwvwHw{qR?d%Lc#CeY2nBH|1J|hkK)k(26+S+aW*>#f{velLM3sKd#~eBS1oQA%vZUs4-B2{K<6_$q)8Dsa5h|=sXvtRcl;>4k1vzE?WHa=(V8J% z!@uINmR7_dhauDD7LQ5I8myG-+%4J6PL@;DZ%Lma_mB+roZtkQfdHzbi>CX{tIhK# zb9hXORe&ODzIQ)T2@asEdMbi5Y#&jS6(UpXgBA1Y zr0eSep`G{Po;6t5DE5Ar z#_tUhgxDf^3GuViUyt~@H%q5C-j-#w7Zn4HB1H|)qT$A1ppswV8;cnW#lkspQDy9> znh!82MU7dSJopADq5&k%T$(GNDq}`7Z%~eK ziPmGPv3jarC3|qc;@wwy} z6^6%2PFAyro8*g@nO2S2Oq)cW4k3nuiV-u(2? ztu+NSkBT<#<`w5V&G)wq?Pt#|*$b22nRxf>MgSH|-9bB8(Nx>&Z?iI6XlovMs4s1pQsfQu&=yG^JQ4|fEBc}@Hn|2IbIcD;DEdEP6~s_ zweQ&0-vc|jGA&MHbyU>i(Suen6d(@yhpU<K%1NJo<=%unxx#P? 
zkx+BZV-e6h|CDm8Cop72S4lUT4COg|f=qF69H9b|pR|1zsxKd4V?fu(s0D#cb5Xa3 z2mGB+Oj!xeAV=>iT%V;|7|ZGln!MZ+=x?C_+B#uJ7WH!V{a&8kL=K>2(G4A`(RS=y zDC~giZNZMOlbjZDmHmQKTnRNL*@ z=(`%2cjFHBP2VT!0Z72_YqgnK#(ekZTqU=AxW!AP^R)_$F82B0sNmwd^n1ZVMa_iz zHB;?vV`h9E*CQICq5I7{7D%_6O{dY%vQ78RGwYV;ZAPhVb`ak6y3s2ipja-GEpO1p z!V&xxSW+g-o&fb7<26P>4fYUL64C{d>OZX?Qg(}J;|4+~e1^Nqrvz$BrVh23E;|RX z8{F;1|CuFUBC-q6Pum~ie_HpbuXqE>Q_@ZMUcwIQ3TzhOys3ZUes$~@{cN7)^Pby{ z_dBSNl#&d=rnOmXST?uN`N!QAaq&FS?sj6JRl|ZDe|3v=+TA^r|A<0-HVkB=P=$0_ zC6b=(9?j z2X169TW0fFE9CTI9?Z=$gP|sh`D!ba;XK$Zw(koV(e0PME2kqED-Tf}e!6W{%6IjB z03@>o_z$f{R{{_~w(1}Fk)8pFB}+YZ)NCzJz^}WA{5=&GWgNoiuLztz_f&Od+7W<< z$j^}PV$sy)NvC11RJRfYXiTcQ3{Ma23><_$V|TNMb?F|cb(f$)> zCe`?JhUJ@uOxlc)JC_NIbv&v1n}Az0+9wB`gqN$h9n-F%KGLx^rjd#I*Mbknv1W_x zC(CJlfTwBJUdjMy0aMv8E-NSVURg@P*9EyAfQ!i{@Y?GZqDIYEBQb0K!XWlcjm+P1zXajg%;@YY^)@w%%HE{^r)nHJ<*~{6{ z0%$HdeFmTYTwNoVI(x89fW&ka{vuS?*m|%nU>k8Tm4g8p{mx%cjn?U@&y@Bp*F{G} zLW+rxrJR>2cKB<`f~y%uj`>>CB6CdQLz;4Stm${qI!m7!jbs{yRJYLXz;_A5(QN8+ z%!H0T1Go@6Sxs*=aqlq6N!?)Ap4w19Y{c$Z7b_Prf(u{t?^{s(5FhJB@-zGun!SOp zooH;@7?JboMe{kOs!!5auRUbzylgyTa(}!)J&{o9Ysh~Nc4@!WKFb=jWd1;L&;)p2 ztft=xJXgPSBCKtgO8g28pEAXAZNjVPhlf?SmJNgLXl3%*is!~RS~tsBJ>Q|e(&?_N zc&=CEUHkHaCSbwHuQ(M>xOhe|QAcFLVjM5HIr4_&F)MFh`%5i>^XVr5 znQSht!S4pxELZt!Kg@m~N7j9=>0(Y_c}_pFJMjFK#Z%WS7H&Rck*g4Xy}#@LpHd69 zys4ciJ>s~Gl}r_;D``4Wy`F5kAq>SiY0|$)5u0yvW-=(#5ww=YF=EE|e0-zOZbx&8 zf6ln*bYL-OiTGM7bVH-wy(cS`kYowe${04QTF+UMVY1+b$t!<@FCD$I)|Jd-!onsK z5I{4J+^usu$uG)(FMuYQrzoHH6EN|pw+TRN%9*U4486~JfZzN6scGn!Z#atVXql8l7X%iQRPNJ%!o}fjPZ>8 zw?eF6*2?^7{0}=&*`!hi&`mM!I@~qY{~DYBPj2ylT|D*w*NgtDBIBVbAML$@!H@T% zdkq-ixCzKo$bcu>up8>rm-;^**8kDxg@`KA^%M=EIMl?MF#ps9#)m9qdek%ch7=V@ zqMcm9d34czQCSG4cnzJRlwqJsPub2A^Z!FZoXWw>)<1P{1&0=_lSmlE%Wse9ZAN+< z*^&Ve9Rim7PXXY^O;HG>N0eERbNDM4(^l zbXJ<@+baSk(uUynUHoIlpu}>v7T)D9qINDD0S8ytViQ?~FOCK&%(f)HJ!W_ezGi?$ zvwcjWYUQcuUan{@YS~%nwBTOxkAb(_d3Z6lWTbX*4*IlJFJ1-OJ0bh~u3yeKtnV5Z9uhwK$HzMhd7+rKZVr4dd!S||A(AY>!%ER6Bm#>M!s z71F!}rw_^&BR;^v=@FzSC0Ifz{?G z+7kymf^Vl?Dw||HU5KM%5OT%IEnWyFQ0 z^^i4}O%T);6P;wD3FgM?$bT>O`NZ^KZ@Zq4r6rJJI%nsfdN2;g7f`m?Gr6jsHO05v zpTi>3jWJ=dZHA$m0OR{ilWxKUYTS!_9^}~A?s6o$XM=&k-+^009fQk)nAr^3UuF=| z`Y^*XbB&w*=2+z4fvXpcH)|g)hb4QqC8_iLwf@aD5z6VoBc~hb`3zY}%G!WA#cp>^ z`Zzs9c_a9v%TKbbk!sQ}t?0Y?;&F!>1L^1VSY$j!g6FmU^w#ptpFir7zfPPc(sbQK zr@m3_g&AVBM-tz&x-wONTu{H!4;wlZg2*%BLkhmhas+=5BV3oaY?!;gM-mt49`Due zU0(t#KCA`jKBF@`hXl=433bXvaQe#zQJ`CuE~ zW-km@q&3D{B|5?k0v!rG2i~2g)2*m^2`pf@%NLe^N0$--3F7Yy=ux_>TZ_RigOZH6 zW5JBoUe1zVc$RG`Dx9!wzy;ngJl>=A3LnIj$`6b`o#>x7@-ALBs&lG);@(c6!S$O!nKZqK zAT;hY;c@8x3Rk~eGhZp#8by0uRo8H?PGRCML@M4pnrP~uT3g3xGv+^>tQN~BHlj-3 zcNht6T9I7Sy)*W&gc!A+h>nJf?cxmA=k#qMHQpn!+AFUynF`fRtL8)Dml8twfv}Ls`mu(1o%NOS|~ETu2OB* zk>UAog5KEa*>El+`L`hjOPc!dPP@}EZYIJ{@9%=g*&5&q8J)k&yk$$Ueorq|7^~lZ zvvF3a&+}O1XdjWHNeuM-aT|`B0EPHysO@b=>r~pq;cM|@NIyLW@yhr~>H7itNtk5u zp5q}g>nbQxpZkfAmaaz~A~t_E>y-6ojNRPX z1(%PP5hn$smXo*=&Osr`%1@#I5onlvZZOX_BK|O?-qXuwZ0g4qr{+Dl5AYUrPw21L z3(Y@QNV4#=kwNKN#Hvptyfi6sqd>i97J>CkHJ{j8(HnT!?H%y^m@yixrM}z**3`sH z!=}c+_aX`NA!#yq@p_Zoaw{Z*;FW;!dfS-qei9*r_N{q^HTI4^pWvlFiEHv%pGFI# zj|*~Ic3Hlmw;mQAZX?3lOIOWR-C#V&*`6t${EY#hm(MWsTx>K>KBw*5^sTbJntI zvw|CcBRSMwca1OTCE;FsXMUt6+ruvl(oL>N!q5&0G%QEZh%C1p>-8m71lAqgl<14U zO>&HdXAJZ{V?Ggrjbx4Ckf)uv%2`jH>I49&oxSx2(fexV3whpqcO(o)6 zMe6$5R|04L8MYPw!SvJCZDygL-L=llG#)FT)@6Ed{bzbrm!~Ax5}%O_aKV(^E!SQi zLB;06w~3e_atEb}mm&FS>Z{i2!yTwhf)q^}PZz@f01*|t5t8c!qNCQkrz6f*UZ9Xo zUaxEQ;A;$xF{6s`+%2YBow4DN1OjJ0x03xbtGw2-RktQ2siCq?&$GU`9dk97;I}po z?58nCyV$BE=*FKbEi&RPvhQ!+BaD zbXP9CEpca3twLW7y!u;<59^6d`HggT5qrZ*3&TVHUPwe8NXI3vlsKXJf7#vuFs5=d 
z881RtMd{2I_>BlO2v)E&{`mE0pU@lhPwM_1-vHUfM7!@oZ41?aAD>Th?=#X(N{P-B!fg>-cg{@Px=%y!v zn+mg15@=bNW37W>J|qt%5oOF!=m5(xrM}i_`IOc<;+PYI0@4)tTA{wOy6B%A)ZHq2 zL|-yiqu%)3bx+$viqXC*xcO~1P2=YW{CO#(Pgon5?a4V;#vZ01K|SbFGj`u_!s-~? z<$I5DQ$jnt+zL)3GbzHg0qx#@fAc^&&`nLPn?*}&`?&wK&}GAX@KYQ#xRV-dH!QAj zR)Q!)iQ!PJlcORVhuk|9JtDJS0zfLP*NcCZeBoZgYPiK8RVmUjf~TIRgeNRZqDKy& z70GzQ!%OmXX?k|7Ch2bKsMweT>M+~)SZI3m)oVuDH-m*Rh_#5RvA&TMuEJ+zn=5p7 zg~)E+*jhW?oqmAS*cSZo;loXxdbbMfnE?R}*r5gnvf|dG;}{Xd+IF?Mwb^h0P%5C# zUJOW&ZwMPYShTFzN+eZU=YSU8uOYMI(;ds8FG>d6z0CI1u5fWSGealVn=GaIxAaur zec!+HuFy*d67y?v@0q&etmcK@n={USUv2*fil>o^c%=6pzR8_tn%U8V1-_K@raKuM zAGpR;8Dv^C_o-897}YIyy`9rXr&k+?~kYglX8m&W%Hw|>wV zBO4;K4L>|W?0#)bqDcj}ga?9K@{cn4fh>spJloA<`hOpTgO7iTB^wLMAWu z_SKem2v1t@JD>aw03;T#Y*DlyU0^r2Rbf9uz32G*egQhCWVk^%DZ*spChtNC_A*K9qc>|lKGF$hihzft#> zZCQ0~*eHyI(%lUb(j}#Yw9=gd(%m7AbV*2er*wD6Nq09QA>AEoaNW=IzUwcnZSw;P zPUakO9D9v{?i`W~%kp=Sf5d znU|u>=<%1H*=n##^Q%l&bk?PzAaa_v%<|*CpfQ&1I>d08%$|T2FE8ian&f^Jav}FP zmI1D47w7t{z;vQ#VJx zbEc3JSa-T<_5p|8>VVG0ML#@=q@UZ(*A>OY|G$l~RS>3Sqy5-B^>fAAD&q`q*b@c& zO%?zE8~v%ufAchKjGRTgLOe>+G2dK&!+Alkej7TrXqfG5FF5~b0qWqX@9w=0AO`by z3eZ;`DgJjNd=tB`Y$hJoKiO5bw`BzCFBn3$DOrl&t@(q_VCU#{N_-M7?nY4+)?B#M zBFrSx&eFx|-B0~D-aKtgjytreR!Zj5B(mkb-6yBhx?^?8aOsP0`8SDi=Oi$*YibKe z`?qG|E{GUaygqvgh(VOHq-bcd@|VvXF+uXTTerCkm~ z`q&_!8kkXnSarDIt$2n%7hO~EYap+2*zv3!bCf+2P;%3K?T!Dy8Uy?c^4#c>9DcQbY^0^wFAqtW7t(!T=t)IR_Jdn;n26!nfBAz$?fEs~xr`;*OJ z;_9Yvx~kEmvKTfZ)W+F?)yt5qjZAKyX^-iX4$FXlUEvS}DGBz0GSO|A30C6oVp zzFpwUFJ1}lpfKYYX{7fhSpBjGg_1{D(LF5w`WYSkkuUXRFbNE++(=3Ip2uOSI=!$l!>}o<~u}3SDj{S|1Jl=>u2{y(Pj?Rtr03s zcWu`(>LY!mK+zjBGKo+5$q}d3#hemBM`pF7)qjxY?Zhriv(8yJ@oArT+rGSe13E1} zA?8*fc#_J+Q}&9ZFj(e|c0e554Iwd484F8{sC+72TuKY;Kdt=XD`-@_-O3TdyR9?b zy>bhMvs*Id%b_=iV5xxh@~|iHkPv+HZ6WFKH_0XILpI9q?-N1E&a)X$gl>G(dwM_g z@GQrj+u%NwGt+*b)zYD{viL~SNU(s_Hh5)aRp8f`BctweA?G@5ZkBz> zhooFN*Q>r`ZYnS%Z5>^eymDb=Y{C(MDcW{li6}G`^#Rie-Mc9>s+V9xF0^O+xg7TNL6I#HtPd76xm&L=jK>&^^cG{)v@~ zah{#(WmclHX)vTV!0rJOGQ7E6%Q@lbi_n5rkzg_;)n^Ru z(%WUWt5~VhrHVh4m-JLKcp4wWQF>t`;1L>WUWQ#vU;TB>PZ!pT(H!e$Vsp)}V5&xj zPm1^LC+w36G^II3E`r`uJqjXZd{IGZ?7Y@0fDN8}5{w&u)l|X^87{HEC7vd;^uO~9 zM<+}5xmkA%p3G$}%zPd8L6co-vD*VS7&U4oFe;(x5m`gX`Zj-YcdZ7~|5RVp`?DXb z4y=`u0OP(H8@Ec85h9g~y9QS#>N!Toy*zTV%;;tPyElbXbLdWYgu?U#chY}#j3(>@ zdD|LjtF2EO%AZk(?lf78`gC07TZ;uiISb-Z8iYMM^KZxQu#l@!JeH94kYOeaXBW>= z=(|l%xnSZLqldD~nA)g2`bey*UKwFQa5`?5gtPc;6*Ax9m`fd+l?4f~Q*qAj1C?W= z^2_O^)Dox!V}#Z4)qRZ7_w)7r^7ClLT3v4`jb`r0ywsbo$y{39-d&&G^r-V)UUn$y zqLnP&qtPHINBqmd6P6M=^-5}|DecgWQSJ9+e8-56nnQ%wJN{BTgwfC0P(~fP$33fS zrJ)jKCP=RdwJ%Q}R&d`DOTQ(rKV$htln2Y=8oQ(rkp6){5V*(C*ObE zurtzS(Dj2!L3MMnV~ppGr;A#@t3kh5QN!rts?vImZB-8SXTBuj90L|FQ1~Vo*`@YLtF|*d~o#@f-XJ* zV|6_*{}<$AzcUm0tr>RjeeB2tT7#|Bk@yQHths`K4$sH$Q~^6_CCqG$Fgt;;oemPQ z%hA@Mu!54EGhZk`P*;h(^HNa6b|=H;GyFjFTB^Q4UborTie)CKTURfh*E>OPHi`?* zzAR2EQQ2x-pghePnk-a*q3&qi+OXBSC;9hfu$2Wgu4SE*aGVMgsk`_Pq;9{HPF@A$ zf319&mDu1gdbgRSt2XUil6+Jvc+&Bf%!-TB`iHMy#S0zz=!PjjLZVEGiZMcVF6 zpimVUKSm*--v6vby6?PfKy%@{I~IK@6_a>im6-&22-V8o70wJn;b`sl#~QrcB+Jpj z2|reX3DmwP>+5J~x@1|_fc(x#&vwE)z7tL!^t|e~e39Y5;N6)&Re#B^C@knrKK<#I z5qLd^wf>zh-E~zWfl4+07?zh{r#DH_g)FplNXf2nW(tby&`BYyF+%$B^1_gFmJZQx?3x?%bia{h z)kOIa>jQ@q2^Xnm#5s4Q4`kC|;UXNn0t=M%T+3h3KG)IcE-VON1>P*uc3&vj2__+bn8rb`#H<$KH&4 zYTM3r{%nlmVvmP)Y=>Z%K>u)=q)@Gu;>Lrs#%49IAxQEC+bVv;{Dn>TD+%#hg5j+@ zjXI7Qf_^SFHuH_5C}lh)wWNP+AtiiUu7(suWxz{EouCM_G2{u6L5ZAh8relw5h3)5 ze>5P$V*BG;G2dOZ-_*BC@jN1-U____5>~d3Z?iJf9^!lSF>q~?UVVvJs+7uqPO<4o zpQ_N=3^Oooht+FB&)N{Gs4IcBD&>nV`42!vt)T|xe8v0_oj3gYu*ettrp-HIx8WTX zRj-he2-Y@@YDJMeNnB}h{MCtad=z$&oUR}_`v>BQQ-yKmIscGf1bcI)unZ3Sl>4;r 
zDe+$kJq3q^iB^#l+%E=|Mb*7?Fs~--&s}$=KDRg|w`}b|M5*CFtR##6ya=!R> zp>8*B;N5M4^*b5nNNe%~clfEg;x)vaJ8Ak}44e%AT%MaZZ9*=}_=<`#om=*L=9Z`o z92V2%dPoAgrTur9VPf-+F4Pep4t>-ap%c1$`vtHnP4_eliw9^*UTQKJ_rImmaEAm* z{~Fw``IuJ(LbzX*lj9EQhANaQGF=?ooSILXQp@^#B$gbexHDjfs=p2hlIs2&+X6=MQSin7?C#k z2wIIh8L`u%-+g+0R6KL6J8D9=SgHBtc%G9S@;^4*ZJKzvy<#2`hs7cDv5ba?gr-ee z+%VgLt@q3;9}w2A1rW!kW~T$S{bqOPjPQvM-9^I7H4mqkQFZ2hMDUzhtwNN2W9CqW zwWQSa{(XFTK+t*gjc822dwO6=)sG1|eti-k!xV4me{D()|6E}Zav zU9sZhlGgbJ*l^p9GdK=^<5*;xJ;vwpWM|`$;snx31$GIZ$h7SsPQtqE9)bRXC~W8dXN{ckGUQxwq_^Azb+k2Qj^)UJm3jP>*b zUe~5{^o%2eMuefD=W?_YaPhN*3hq}WMq-mA&+PDu4+Vc;?8W%jqP-ywYU&aVN3pe~ zX=u|7VaM_`V)CA5FnGwYdO3`Yn;h9cp5;r4)z*|(kLxxE!6yS+uB z{E^MG@#Q)ubLAow2LmbmT>?r9UQ)d^k@d$rHh~}ySg2spBwKsl@av36ThDfhq{I=g zqn&}K)A2^9F-2mT2}xO*u8M_-lDy|YH*r&oIfU8xxNpXmub30U0HTA1z1{rBfJQ|H zfq~NhlT)F;x|C z&*pdliObeAab0bSzU8wZ;&gpr6AkhO@>bd1`1H7MWtN#wnbdz9rvJD^+0WVJnt{i| z4sS|zv)JG=)e3*wDz0Rmepvr1kIwiW6Og-Yar>OI(S)O0T z6{GQ=Gr=0AH>_WprgQiDQI3=akJxW=>YApy!x8hHxKpBj&A^N)l1L)-5O$R#H3HoH zEp;oCdWPOfxi4QHo=L6A8Kzpx>U8KL@^@`{h*hiFxnHrM6JmVLHjr1F>PY5YIz6x_ zA3e79nahiuf4F{SYGxX@%k$r2fxlIW)f_*%yP+xOx0?1b`v)g0(1?V+5$GgmbG zqju-HF@siodF`jf&`yr# zj)g<}kalA%5svv88x>>v*Z*}Y^s{RExJ+&OxIY6(-1$~7#f_6Fp-)~imR#n?9GTAg zTXRDuykkN1HsmZZBwLHYA&W!w{gMEjO+X!wlW?|q*Y*%l2 z+9bS(HW3G_K_e&HJ~kfw@g*0NxTylB^z~d<%Vo^^)di|QqIq3{7^pDAbC^Z2MP0Ek zrRUCIo>r*Qr_vC)nAAZVsL0&D;DXlma>>gs=$M0hYlabYSwF}efsY;UH5i6f~u zY_VYdy5aJKkWaU%`?@u9QffXA^Gn-~a>Z}|{d+R~(^-v%>z*Xur@=L%@`u(}P@GnK z*&^pa|WByRkntWl?fa}H@-e$HR}F&_N% z#}9P}uPuf`;r0rs{;qbDX5e2BG5*>)y_s*`L%vE|_~~g<&#r<19=>}NYrpcm$1RaC zDM$N!ba%RnXmOGEla^9*tc1+}CRia?+4pL9{qg*80aw@k2v%dH zR${zJtt6+OnJTKP@oYb~(RxRt75%k=xnTrM@mMGL8REtUq@&fc5+ayax~mlHLmv+W z>v3oTjd$lKF+|)W{HzZ1>91$HW=X$CxNcSr5u~;_qm%hPBta%QCQohxh)COQy9c$s zv%vo0S-kEWPu~PJKFeOWm+smHx~)YFu2?efeR4Kr^NiJVTKMAl1GEIoVc$&iPNL?) zr`g%CA!ZZXg)L-2xhA^mm|ixM^_3@LY^2ZsmX)H1B(>?7j&a(d+M)p?rjhN){kY_N?%;+)T&(7_7$+PdtP$6@hh9!!_1TX}IdSnq zgWiZdL##xYXmGs8252f^R_4RE-r*4d6Secarf1%>f})7L&q=B)EZBK@D$^j)7&3RP z`-$?g^22&}LjBFj7SmPwf^Q{EzW#Qim=O(uMo!a+?RKAD{v*vgHL>RPJAnHl?YC#u z*{xu3A1hyv=?M0%K)CJJ&T)(XYP*ROf|B$ntDb>NL%Ac`jA%Shrd*Mj^M3$=ssOA< zYyJLojmU^F#&A9CFflFTb>DLYnr}AB5L7(p<<2yN)kHx%-o^L-$gwK$sA2d;UNE4$ zYHHWezOJ45If^tfpNMheT@0S}?#Eh^79D;@TU{=k-v)yfyta#;WMj;4^oO!Vc4zF! 
z8Fv*&p)+mAuFbLHitH|Y0-BuGkvJUOn-i|y%O4H`0%Fv% zzO>?&oVSSu>hD!y%)2={(S^tUZ;qn1ieL}#nqH4jO_(gUJ&jS|hDjQ{6Wd$Zhcwvl zFist9vf|@?Z_IvyQS79gZ697PF6Ld*>o-+i>9Am@I&z^bglJqs80Mebz*m+{Y#^fa z@?=X1#JxG^Pl+7XZ2RsHS2-;4Rw+umUZK9VMq9m0F#o;m>7#YLJJJAq@xsVT|!@^l|l_C#zVAb?TJjw(~ps|D;r2s(2k8--*3X z^*v5pIaUI?(mLDUClBnab2a>KcbQLlXI#?vQo9Av+O+&{QINP~#r7#`+3a;#fB+Yk zR1B%KXZYXL3dgFM3%N&LJ<9hs5l(qMZoqMoXl6}v<|@+hwH)KPAb-$oDxqFJ7HDNY zsf8lv;PAWsK&^5M#YjPix)f2WP*e^&wzYsQ{rKLG<*|vW0r$wqX1Vgyr|OKs$Nrpz zeeo}56BDz&si#sJ5ZtGaIAF_w4A4`CPRPghjwGqw`Jhg2&2LQhK1l1Z`y=3@ zYR~(CWfm{3g@)oZ!k6)UxrE2K#)cQ^LAt)ahy-eD%AmozEl)-13O4vRC@7CLLCDbe zvGE3LhZ$Y;AA_?hr(m1^DDj*W@Y1~P)Ty74Smorx2px~zh5>Kw7hEp1dbM9pn8a!Sa>Fp6>D3(^L$S31nicVwl=+ZV(kUI}Z%YGvGwbJy6#%$-EG+Nm`>%wyyl4zMOvyu+|O=*J_sVA zo$Ut2ep@}~!-%cjoOVFx3P2XNBbU%qv$K<(9c6FF8)f9)mn{(|+YdGvg|eS?48gsC zBRF3$&ylAv1p|i#H9gcl-wwLG*ozr|tCsi(p!!WzDSV9OsDGVN9@w7<-{NwQEz4Y> zu~U3HQ|UQ?0Xn5Z<1^AI?a5d?f%MXH4`%U>rNq5)T2k`AkO2e$GYHVqo4)N#8n9Di zy&?y^3uxPK1=!+wRTrA*1Fw^R)X99zcH3G6>y>>hOeXl{sVk3%aScPOyygK7=(vrd zrH?Ppp0pU~mzE%ra0n-j1qbe29Y*kTBR1Fs>d)cB>nd(M)s2 z4=Q+dRv#PhHjWB)KWg1sXJD~q95pvH>z7ms2Wtl1(<3%`;64XNCYkr+I7F{P($jbd z+vr(?b-RT_D@|cF+qY{D3j-&x_Wr5!Flf$@R7q{>lhOWfR(4jOUu|rl zQf2GEJz=}ySA?T0ak%F}?%Ni7GbF9wmZ}IlkXU7ipMK|7`0&+A#_B-bjc;sBR0klN z@#8U6_?xo`C%*gh1j^6GaDQmIwI%jxNdD>!b>EY!9z97}uR@7<8USzz3%sOV^M4df zBEeB7-(Um0^dB=60WE_}<^Znq-A6}mDa%RSX}#La!hCZp96f-$<^R-sf;iFfQ4Sj} ztALg_B{DM;nNA3 zkK>7rlGaDS%@Vowc%Ra%@}*w`j5re1mKM~})q~hZ=3&^j6FR@-)b$HRZM?!c2Kstr z=X4_fi$nu|vtQ3&GVZP_L2t8v!n3!U$sZ7H(3RB~jurC4sKuCV>!j$5+JD$zp z_MOn5P-PFhA!8Y=K%TuCA~o%?>Z%SH4eDV{PV+YqXV39A*SV6sgWGBQYEO-Z4 zokh(Lrb{(gZZ*e2qgHOh+A@Bw2YV|H!;GF8qax~g3un=EgB=EB+%9d7L30dfuwE$N zM*hNR@WKMQCVV#hW2Se49-P-P5PLSB4wwNH^uH#O6YWn@nVIAK^q3qDRs!0^-g|gJ zoPebFo~nSRvv>2u5x-sFPH_;s#u%u<>!xdOT0fuw(xNIhYXdr1#Mi~}{cC3O_mqj3 ziIrJ&FE9>(_3<)SJk_%%@M4$UFbRR{joUK{9q*H08jnzhrFEu?<-BfA#Qt^8X^7f^ zvOs-d~^ZPj&7T>q)oM-B?T_CL7WI*SXtM*Nk0TD8UrIEO2Hv+`iW|K(}J} zCHSz3Wsv0d=e3(SD|aR1Ni7hIDYLtOr6$7z6%(H*8Fz;DJT{6rGO^*^XDa>OeM0Od zcOXupr=a_RMP_PlYIGTw!*xH#%*%=Jiwz^{bI*U*ox{C;zK-eM==e1DSuX-w1z{=x z$iTw(lF%29D0=#a`O_$czJNE8Q}N-?#^cc468{I+Z;im#)iGb#jLRy@f&*2RI=y8O zfI;!sG-CElq>*TV-hnSv@b?Jf`jNRKAwi8HMp@RDu|{qm?x=P0N9V_D)WN}TGt(?z z6>6xs(1*pwqSKin5f}gSI6)DKVh{bl0S{nw z{LIcw`#-P^o-XJA{a@bwfBe7P#z=n<)UR z2^&iD)YLlaS?l|VJ0)j{y>&lnXrbH>dr{D_Yhu_9fO3jRw)EW@3GOggxk0cw4B%2I zsL~WiQ9!|gmX!YvCP8}?3W|rAIq(8siSaR(W@%BzZtNU)Up>Z+Nz&}SV9*;K=HSq9 zD5!bv{hxrx=J8P~5>yFiPU>&!k;vtsxxep6{jNo*C z>Pv$>3sDWmx<@pIQ0Ip~AiZ^yV^r1!3-(Mx=p(Q)^sbA-UAB%sMP-Du3|<0yDLy*^8N3AwR8SeQv{ta;hOIg=n^L`e2MtUaaU1; zEo&PNLVv^Le)jr{!JOW8BW>dYPo#M}ZK6;)y8L>TviC-oK*zr;eC#9;~@C^`U;w+2kd(S>>4cL&!m4aC233}nXCP};Ga&iT{b1gAK*x5X+(Eubc ziMZTlg+ih&7j_LpWZ#h^YJ_50*{!e2WP)2G6! z0Ft=g$TG?9oa?*&mesV^dW-#VH~VXXw#OkHQ0qXWKuui>G<%DOy?co%3_m~wk~IG4 za3DJX_-Mjs-Oy;(=ry(9gs86NuDa4XZe&kaj;@PGf)N&gH2MbD-gc>3pfh2D!>u3Ir1-c`X4? 
zA?o@v1i{dDA(c^*dDPQ*BZauI3HSR?ggJSq?c71CE0rc&bi^9o2mom)@A1YBE`3v8 z8<74;#Rs3^yS8)RBCYTmHEpf4=3||s=-Q~rnFlI>bd$@Iix{m}XoHNT#PI&NK@hW*TNc3FdHl+GRC!b{ zLZ?fcM{q@d`3)&aMC>%x?mxng!4ke?PE!<#dx1jb)qMF!QPF_TtqGJHxE4`)Q}P## zg&Fp(&;-^2OKQ4&0l$=RA;+$DTYj?`uYnAB4++QV8({EPJ>9~CcWpl7Q2Bw@G~Exa zYP_X4SHY>tZ5;^=DeOJYxSI8fu@_P6Wv;JxZAE)MgVY&)L$U}g>6KrA>iA4si)_??CfGm=(%~6jK10t2bGa2iX4AD+6Z5Nh={#cH|wM%VcAr{}HcXFmb|?417?Jhi_#EDV(9{9;a(lS-09o7cT>igm*2 zpF<_^4R8hjwz5~l;Y{N-UQ!qP>|Zye*OnsNT<%Zf6b(j;ES6RQFju&BU9dmS3J;0w z0+9V=_tw*26jp>>+6UB*-^zs%WniQ|&zU8Un_G{ah$fyhc$ER5{;>(42f-n}B9Qas zPjtTD*8XOus&mwcFmMVVc0+%Di-d?`n6|TV7}}qB;&DGKKPTGG+#O5G5OsOpwNr}s zq6C&dJM{EO2qn_|0QhB4OIdVC{L0pJhD0?MzFgGFd_m@Nr)Dn@=&!7Yk)6lqkNgkw zknlJc*#)B`oFNsV;|Lf#z*3>s&KBg_3<>GQWv1X@LF4Kyw zNsHek!8S%ZeYHf;mXZ`bV+aF}x`Fi34v5;(N-<~r?E$6yPf1O3s{~> zSBG!q6mlffpmkYNl<5O@q`s#%RFMIB_SSf33s@&>PCW54YlZfztHFh=B}?`AFOn1E zLd*!z5B!=V`}*5rzUk@bDe3|N2@>KHrWb@cc^VdE4IwB0!A;NhT%b%&&OEp$X6IwhQ=>-&R_*Z_~LA zw(ytTTyUT&+(x;Ok2!r@8G-K&EE(*2{<5vcs&9^K1L^`wPxeb7$Y}rxxulOEZD?DA zxip{ueUbVl?9HZ60us~yoAG$jQ0UNHCG|Ta~ej!wlX2TAdOs9-Q|GHijD_4<*rW<3{&L-jbC4j-;4HoNel@I zTKAMCHYRc2GmrU(D-{?+qWJb@Vp+1nt(Bxk}QjQJ5?b zKi&RWI^MflJ{`be#^oDY;VwV6?+?-X#2aI@bkx6Y?`FG;)oaX} z6y2}2xc|wgcGIJCa&o@(-k1dB-s@eP+jO#=YfOB-1!^G4zerzB zhnb2yCZX&vmMGvPkYAUwh3aLJY?1P=K*?B7I?kbNRE=pG@!kU=;%6ff&s!{3-qt5? z0&ytQxsou+$6Cu})ID>;{(fnbGO?G|jLn3&7&1Bfkqi=DTCLuh3H;Ee+I_z|84`J* zfk-3E{EEVk?1+MDaQO;>8C#0_Ob!0bdwGx$AKu4?2wwcbGGKES63op*N-_X!s-oWE z2Oyu8^r}Hczj#Hnr9B?6;J%YS2LVRI&T$Id+zP{e#)GfUd~0p~|AeKr*g|nCzFYna z#%QI@j+fC+#7`0mFOT>+>=-YGRw+tY>!T9y@xI7y1nSRh&T2c2q^*E7pniE@Kw$g}O8CVWIBhzcsxP6&=c|4m`z&E8jJ)b`(h0H!6WPE1r0`yvzmL8r z-NpqD2*?enxekZE2wwfe%;-8%#K_WSBm}=Oo;3ONrquI zeiBdQWBym#OIPCI^iG*YWg%eQ-rNa70<>sfd_SP;wE25=-L$`1zw<)deK&zervWsG z%EjBqQJJyNBJ%Sl`P$8z{Bd_vljFj1h%@LX)E3SH6B z5vz8r=6HHDrq}5h|8s^b9xT+Y#<+7L4F%DL3dbeOmNV~&Ee@`@kRK#aKar(b^~RA8 zcv~^zjknnJ#vPH51d`*NxV0_^Yd6N5-n^)D2w|^Tt6N8p)L6~>89|?aH`Zp|tSk1} z1$~Ni>KTnOrA=X*&xsR7xw0&fes<=?c=AB>iUiAm0`Li>f)d3&ClbE)G=z=@V&({| z7W(f56)&NNX$v2kOU-Y0y8#W4Cxo-dy1u_WY3OS$8>morKM)789Q7oO%bDs~M1oS& zxjHJeO(TA_V{JNSH}7f>Bgw(f4*j8U)$jVep^@Ozr-d>Se+J6 zL@2P=AytPCcElimJ`}?gEzuCw={@P2?G`|d#L-hBtuE1B&}J*xK>dJY$wL({a9R+`k~=D zkX*X87QDs03#sKC79W-A1uD?0_my8(8yny=F|Nz`!cH`%YBZ-WPSK3~xa=7JvV(9|gbX1uB&Pb{GNo>wx8o&^9Y1?l&f#Q!f+E2DoNaDNNZ`%lTPI3^BMNl#hZ9+X`)JcUHtB!w zQzdmPgm9Fr{}`L zio`YVYiupMapKmNszjAUkbvR1O;BONG0xF5jd9@br*@pqkpC-An+T1`L{-KNBxQyK zgU(RqtHa57P=MbJRaHj-7F5xE-xuY2=P}Cd??3C}QE5<;;*K5c%jNqYKbjo2iAi^$ zLIELvK~pdz9S zb7OgIx{YWYQfrwM#mdWQ0DZL1=^*h6w1mp*;cd)f4xQQeWpk;ug3k{95RhS5sBvB` zgFXylk-Fg%K)(DLE6@%ODwHHa07bxoR1B8%C)&v`_j!QDhWAkZQ{7`IgCIf2{l{eTQi_957SmXXL*Ru2*z7 zBENoyVatMzUflByG{I9yMWZBt!jRrGUE;lXc|einfWpvMsolJr)70g;w;;0WQd5xj zm+oVwbR8%>@spn=#Y4#nUMzXwkD;Sqp!(>-pp)Xn?>J0O1$-jq@Fd(z=BesLP)b0B z?4z+j8OFU`a&ts+PJh4yQG!5EjyE+nvjj5?bIj8>L^GA$PctTL?jtVS#=aml2{t=v z;9Z~T-0-WzWGA0iZCZSSMC{+?lemN!KLJWO-R?$uk?+^%pr{Jy(>K11eP(IwjI-lo zMhkmZZq2_d&kl9sr0I6nY(fXcmVRXRdH4b5F|izM{}+OF@@{K?3`RrauzKQYYcc@< zXTf+>6R)R#=KFg0(K+kqGqG-7M~F~eHOi{IQ*6|VPXz6bfSEUi?EfZ^`}czwgB9gK+$sb6FU9z$3eA0UPRBgNfSuvT^j zq-ZLh!Kac?t2mOTmy~TLbr>(g~&i$WD66yL= z8#p(|ZCD&(i6D9lYWknM)-ZrmO_l2xp6~aVxwULHuyO85@KPf;&(f-;^PR*%>L3^7 zZI>Dtuc#or%lw9;ONk| z&a1UF7Bv?oX`dObeBZyKVI&OhDA!;h1y^)t{y3^B6HV6oRYghB0d^f~yn6FfVv_nF z5cvaB^v&fMf!)XJ_X(uU#Z^TeNUGSVaEDZKfK!$@k2W8-LtmSGFFldf{l8g&O2Z{f z(Z9(ub0CywW%Y5oNR@1ce#Td(cMpt|FGw5Kk?98c?bH`9Cjjvg>&;k4umG0bxewM? 
zzOwG~EfGV*r^HvN;qZNxL+*A5XF5P;(i6=H!56e8yiiVUta#55$rOgFcQsGlV1Lc{ zB3j*#2^!CXD(~^o0({K?lsEC&LL+^NJccw)_4{fx=QxJo!wlyJyzNf9P?LygK&La+ zET{ghRu*6$XOZP=WTc*`**QH=Y!x^yWGL-I7oHreLLA#V=JJJZ&w~({EqSCBFdezn zA}7Pw3ctQuOm#g1p6LR}5swkL^4_FgNeeQAiM4pMahgg~1fW~RB&0Yr4sYvSySYqP zE5f@h{z@yGMDM?N?}(AU&d~+S)pEVRK9$x0kOaihGUG+S#vEVO$!3MjZJl6hIUCf(!Kqz{7I#7 zpH#nO1OA$667;H2vr?QcBI4mErqaA1#QqIaJ3EZ$)z}3XaUt#2_3FXoJ)?5zzsKZE z+QCIb66>c(+}rb@@dJEz!~`8#{1-s4+grtDoRAn2NQZ#-sGOxKM_w&k1Lw<+L$Tpy^9#rJ<9lp zV!?e&ZJx?)Gyj_AwV18VulKHlK8ZUB!8Un-*Anpz*rR|}pA3_AK0UW+iB$Sz0@N|27F3}Lz z$SG^xq_8LT+(^t!IBoe8&%7^LZXrDl4r+=Gp?Pm-hNtA3?D0qJS%7dG4!z>c(DpgoM6n9Z3N5M@36gpf zd~~%RA%m!u>adlf!}K!Zq2d4(#Q38^5zd-tpM0$)4r%fK*QHOAz(=@uFeAXL2EkSa zMwso>He3GVL--C0#>-i>!kQp?N+6oCnVo*Sxp8HK20Et>-oop70TlR^7LB) zed_2G9#ip>TC$@j2h$z7o(~V4$lO%IbS<_dS74> zy~?Q9aZMfBKDf%^@&JTRek$KO3nBsvA~@b0T@GT6Y`S68`X!64nLYc2M7xAlivdM8 zq!^FkGj;NI&(4k*bE<0o-5dP}4AQbA%`_fD8cA9?=6Pi9-7$)T;zz#6Y#1eVjj3*( zp}=c2irZxHr^lJ+i1H%SrQ@kSu^)48Q?d)8AIM!E9`l{ zmL#|00Dwssy${xVV6Brr$fZc~v+x{nfO5=nNZKgb32YlWLSUeH$PluJ#TEVo`X=L& z0QcIY=>njbvR@Y>QSOo2JXo$t8@q;KchXh7iw$jKQ42D_R3FGx?0MDq%LB5x;fWT5izcLr!C1gzVvPbKHu;Bt>{>qw zO4w_$no(Qq_pYVnSF%Sj$L?Y8erx2B9bw-Hz~ zo1g&S3Y0fkQ$8?OsXa@pzmHmoNznqp5{dgU1O$2#4S>N59PL5A@|7$xHC3Dta6)9> zRljvPP8^(3vgB_f#KJ}K2T^KZa)sq$YmO0fBp`gTORTq6YnN2U<5uD5Pw4U32kjRWdL6A zt@o`Zq&I%NRypP#|F)5&y#>wU>S+W~90aq0Z-wgKEDKgNT;QE}aCoFRz~CT{)dh|^ z%yI&~zz0M*ndF~Aqw76VdSE3Jv*FG zm(NvpcrCBK$_Ro(-8+sZ@v_DYHVUsw3F{@0wA^FsOQ7C*Vu27H@B~D6ugg!o9`Rq2 zT1zM|VvEC)wGV{hkuMG$q4hIy)@#KBG1r%~O`7v{0gWLzuwB@|xab}kf+HO}<``%K zs0$?pamEgu8impr5*)K`&O5^g^k>2;m85t%3%DD1-w7h_IEgwxui*ov3S{0BCU$H& zUDx%kW|M}hIh0HLk?HLnP*;X^1(Tdg6w-`$SKG*drxO$(h>ID$i2=@~W>$Ox{w`9H zGU7Fd>lz=-4^WC&y?=mg+3AM$zXR|Q@OF4W8yc=DKfac~^-QjDVI_5GgzCS02HHP# z|DEe}jgC}J?Y=)qR{i>w6aieyjRUWMxVJxsHuND?2D__ex^13JD_n0(o(;w-mVhB6 z%hcRCxJ>9cj+7?72Yp%_p!yfX&ucAfU?&Jhv0vynfoF8hqCiXX@snmy{BJ@wSusTrk* zzyUs_(=T@Ls7U@}O9d|Lu3?R4GvF-JsI`;oEhom-*G30iZ=3W{%^6jYO9ej851=;r zu;LVTVdvFzv^HFEuKl$?7QENnWt7-H@7u8bX0!7{096DO8YB2A*b~oVKZj=$F_(yx z&6h(NZ8Iixt~3;X>#?6Cp)zvLXyYt#lZD1qm5vM={}+iXvL^!pfWp7Py}-3h{cSHy z1s2~+o)Sdl8En^p3yRNLiUrh_q3f^UY$^nAt-DL~iPRJvcuOko`*YQ8A$h&_|FHL0 zVO4%#)G)jeBm_iIKuS^?lx|5W3F!_=0qJfeq)WQHyBm}ikY>}}-Cgh6{{GMPK4;(c z9eigWCyMTU$GX>=bBr;^nm4!s=-@mYh{&vaYN=eQD!^kmbk7Uk?3W{X=>PXG@Q)xV zr$vE1=h9$z(s+$9Y@armJ*eXIsZv>MK%GUqU^31eK#Ah@!64Y)fZa<5WiA%J@SdLX)YWo>8JGNHRPD_aYUQ$| zkCDm@$jxc_ilR3OHmtj%hT7^J+g|rYLxNyaL%A@8Z$z5B1j2qbZ)$$F{wJ~e zHQjocYYJs;b%_7)7ecocs8(ZRiXb! zZh)wbVu+s=gs()XWuQJm>C&FEF;H6(0sP+jx=YJv0X%g})wdr?_w0q+28Cmu2b|lZ zzqk06R`0m-u@#Od;#}-(3Qpm8>yGE!{pNd6(=9ps<_v&QIoQ+1*|Ofoj9m^d&}E}! 
z<$T%V)^-RskiVXDXFW)93vA0P{sD>=VBgy2HK8F_^-h>Vl4ZI~4dQiLNU@qn))U5; zT42)@sQ$?m9QrgSB7zZ!IJCLfQ3H5C{{2^Oi#KjCQhmx>XFe8IZ@m;b3a+On}X3vv1i&SL_#PTmkUK+$oZ^tg3-Ciqk=E6 zKS$)sM=v03d)sAme5WRQg7N|d#rJH%@zTSSVy`Z#C25Z`$>3$`B3E)0kL%&UH;Jjs z!2zDOU)_C(kR{Vh_m0J{i?|xNMDIE292pK95g*@pPFcQ^`F7H=w9ru#`(#Vsa@Pyv zCba!Hbn*qi$8=SIke4EHb@V?5Sdr;>-Ej^N~L!=ny!;%g6-2 z9i{%J=W8|{{HKyJSP$U`zYYftn*)#q?Y>cfApFJT3yPR4m6J;Gcb5;LBb8rxQc4a& zVtyu~weiGR-!9vWbo-JsQxX=K(3DqI_!mVHxD`%(T%GwF`evIRv@?OXx;e~pI$C1{ z$6Ee+=zC&FoB7>R!Oryh2e+!r+qZ0LEmUxI9ARSJwzOaES}-@0^ch ze&O+HqxbjaQwmR(puie07)k%-J`kQih1?u+_gEZg8sl!aP}hvWk0CGnu}nRq5h`6B zQJ%-?@Bn#(I4=xtn^&_xUEPq_&T>}l@B~Zvn!ho$YEoutWXvDr_#b|LZ)xK$MSF1g zhHE9LQXpbgX-c$;W79&zsy;I#0_a^X0m5O9&i?c{-@y z{h^Vlcey2dXmH2FWO|4B689-8>Zf|{hAjC8TA)s_;oq<`94)?O4tdO){fw7DK+bV} z%|m>1f)|#03ol3-4h*GO%CS&tmE0%zlVIm#iR*EdfCz$2`2Tt%GixOO*J@5gt%Pw{RMUaTN|tHEz+04O$4w8 zcOqrz|L>(Jpn=zv{rees<@2v27C1OdSNgy6gFd5DO#k&RBO_z}|9|xV8iDBMdLDsp zxu*}P6a9Hz^XOpIXrKDV$&ylZVLNgJ1PpjLd+>-ST~HrbZ0Qbbw^KiI!L3hp{P#&R zvP}7pkv}1ySkPv;_)`BH|NrlX+SQF_h0R>u+VMO1wq!&-|D%YyfPl55Pe6dp@AQ^e z75SDMqMY)!m8lkExfJYP> zGW?l3Gc%~kSVY9gU+`aJacqZ&_UHHve(b`6y5aWs@6IyTaqr(#(MG?K+;M zE~iFg4iN!TLI3CbsPEpTR93rQ24zs?_Cq$ZLQWyCkZ7)q<@g{yn7wgD-QAcczse{tSGVT_EGWGC^6sYY-;DIJgcz# z$wX161eCrnc4Ed`CMG##oX4?xu%b|#NT$W*ccqPCFCSlqq?E|bYEJ;a2cgNsgUjtt zx`J2it*zx|V~ENVMZyQmaDgJ;0|Cfj^a46rj3bWE3I^Gjsq%_F!@|n3rn!e|t@Wu_ zS8MvBM@Ga^(Z*@3F3$Z1rqzn*$*GYOmX{IvpMpWxUCe+lHXHYvn+FpD<)Zj{0~<*gf#dTP(73lBN$#<6W4~wwF5F_Ri0f+S~auQzGL1{w?0 zq)goWo9x zJ5#efm4ixOq~tUA7rn4kggP@T(rq9AC!^-@4Z$&Y)Ht4_&RNvRXwP$Dp86o|SI(T6 z?`nGjKR}&harau{udB)pEFPct75RT8E~?l2hCcvBN=p5(1dM5I4 zLzf9H{!>LHEM8o^#O1-OiG=Z^vrqESxiI=w=ib!!t-nbbthx#Q(ka|WS9>f|b(U$n z&S17WU3XxJH+X0raxqg;)rS+JOp*%Q?CWm?;|AQV<8~79#%~tm6zxpN?f-%+sZ4iV z5L6d{5OUjjZLa+(@2luaPn7hF*J^rkJSO}3E!E~s7-bS7`VyNt{x^KqVb?HPHf*_F zC~U1GnT6&FG*I%hxb^3J{M++?k z?Do6($7s(YMV^E+>3=pn-I#;F*s~>SwEKRU%4g+rDxH;7Nv-l6vh=})wa{X*lmj$` z2-JK&G{o;eTmD6~E-Gq#@VOGZg)jK60kJ~>db|B_+>?K?nO1H&Df1YJg)%1=CNe?9 zbAkqVkK87068rD#{kksV%j55t3oTXzA9#m8s7KTJ+njD@t(>e!%uKIO*Q)CDxnB~| zma6CQgYy-xXZh2}S#yd-ozdtb=k=ZlcNnM|o8)hCtmEZlna zY?JB*X+@>b^;6U(W~+2Lb2TUDjJdn(qq{p>0aMld6($SK?@_1D(rKy2FA|~#MwlkI zXJPX?^(E!&J6?!D?|{gR4-)q}^uvezN_#pG0oe+}pA+$9g6nohyyl9z-yq<;+&4SA zWI8cfS{LRh$~M(z-XT{I^nhyw^KQod>i2 zA$hW-HMKv?!DSJ!OVM3=E0k#m@VmX1z-4@WM=1Fc@gMpyuz5r$dPZW_IiX?@dQcTf zo)LhfgQPz9vrPNr)LcWa(}k24<>iq;%-^IhY7Ldp2ChkeGn=hBys+urHMSgnXYXMe z1v{+LkHxBRLntpJFbRuewis8p6%Y)Oyg-n-(rhhO*sNmGPY(!qzlVY^Zfb^FwmtUo z_Hl1awgxNWqviAy5JNtjtV;rHG`^07iW;55$5Zr(NU7%f7L2a%JGCbt6{8ym3SI0K zrz@X=W0u(N&({8R9JZLu4IS*Y+)L#{)~ObEFAfM1xycd>m~MDE=ziN?EAdj~@gnJ~ zJC0#r`T(xZR&>$NpGR&RQYq=|CzCqMb7jQe2sm*5GAw>b`!8j(1*>Fqj zZuMV>86*4U9H8_mlU#0puu?7=);FX(iTQ#GXX}Iy4jvvG`ERyr(Ekr;rq&n$6Ub?t zy2Oj9_t^|t>@H<~<bk#oV?LO^xk_ zN4hB|y=-Q7PaH_h5Iavz#v(3`G6wbEkY&xepgd`By+-`9=uw1S-uBaK1tF8eVw|SV zd`|Te5gwA)>Tch+dT}`En)UYd+E!#uywZHKx`2E0_wJAXP#~t)8lB1!SPN%HvoaT_ zV+_&aB6T$Y4`EAl2sN|LV&YyaQe2mc*?yrxHjNNH-_H9a87qu< zvQ&rcyLpyqgSLYD|H$Q&^a1^TT0_m`t%XTFv(T=}AY%ZqmIo5<9Ltga@bP_Nuo)wL z*c#z23*|3v;;YaqQbwdARtQ2ENwf)TZTp!JUwFlp~oe7pg##zx& z5o2u;%h}kn*6A9PEQMI74K&}gGtIJ3jdl`_Jluf@4ka3!MZ|j16&0D+gvDl)Q0~~7 z+1&$p&r_q+elEp7e|nKoMoKj((mHm%%>HA_Xr#yhByNW&ORhEQ6m(VgQR%cKZl+Fe zw0BRcP-!$rq=r>{dGVG>VUVb_Mk!KWDUBeC2W>it9>@7U^>M<|m`h5BD(0 zGzi>^^A+#~j)!Cq!=#B>g}fw69I9 zDo{W=M#9)AdZ!1#j;=ymtFqc4+9kBcbi0|O&)2t@fXn$$1p?`&P3Yv)|8aU@yw`U9 zT3YsfqiMWM_Lk2=rLrFrzI;LCdmO-hS2@_vRLSUQ%OWxSJg0KWe(gwaLrQU>9uuJP zU0x%FvNvEneKxZt#5rAFjcC+vxxeemO~YMtk85byj>dC=ClNNdgj5U}3T}mLz(;U% zeyyW9A=EZZY9E%j+ykgfqe`Gp~*kFg_%jov`|KEFl1#zs`!!1r(#ZAWRQ(@ 
zZa#l$aE2PS)U$c(<^Wllq2(y2JZDPy3BYFH@Xk_(FL;{;1jC$m!->?g`T3y)4R)}i z6=4|)iqPm46a#S0ofzXUHCrhs*PK|A8JCOf7YQW$Ku3`%$m$`)?L}s!hAIqYqKvLv zy93ET$6XD9bpz+h*?EtbY(#;v*{X+@K<3KgW*lZaHf`1)9JQo4FEX=SXdhZob}l<4 zz%Am$eFu__CGeMTWj4)Z8}^ldQl{wWY{Zp6VL&a$RF}I?qZ*Hl&kb?9H_(*EE?%*9 zKk4ZgP-}Og1mKv5N3NYZ%4!h3bml9R6D}xLd{c&4+2;4$9Q}j=7NF~c-w_(snp1F* z*kLDux5y5yrW~53bW*o3Tne$Wl$Go$%8HMW0((&Q&!<_rXW@oZxuaeLC_ z+Yjgh`bb$;Cw6BY#7c<6=8)b*}#}YG2ZVGe8t;&ni@=D$s4qK&022SM3N0MgoBAQ+*(x$%ZaQ%7k`36qh z7Y1%9x+WWm8#c0q*w}R6U-4wiLeGhy4s{q&hQ2^clc2-?)kLE-%cGL{byuVlNfrNp$sEOj)KZCoj2sYu`-qYGmogTcheVtK(R|hU0As< z-|^lY!+3NtId;4T@2P~Cj^&cfLn$8W=C7Z5vU+TtIPT`x3W&= z-E8x#7K0=}lQ+IO%P@|ow>u&YAUJz(pEIWi2OxUW0D^KSqsJKVgySZ$x$3Ryn1;8> z4s*xexo)$FttDd#JQITQjTcHiSe?wvIf_RSc8gyG@q(FQ*MW!acqZ3Q?oRXue@aa= z?p4b>`~{qCmZ6GGCUUryCVs9uo%i-hOlz3$Zg5@KUT>U;S$ezRy0BdSd*oJ=`Ko1o z^U&NWfeHOXDdB zAz)4IMN|l;66EO_Xo-!MIAq37_sxp!1O1zgX^2x1JHvgo*-zQyf7NZzO&`+0ibH!K zz~K8r6VnXpf+4~wNaUD*l6j1sO^C&!w}pMOk0_0u-+iTT_&A|HaQyCmVt3^eBWg|P zNK7iU!*{7?dJg@Pqr@@IhX9mBa?l>(e(Mo>-BvdcfdSZBR8YCT5Lzn5b`!tf&PY>x zy7jjf5X1EFOYhXub8Ed}z#>+Gs`bLqfk1wE$%kUC%JmEuwe+`ecO&yM9|By^(fM)` zv~G8iVx6s(-T@Q?d8}Jwm}gh4i=+k~y;|tjxsh4%RrEdS|J5v`+ - - + inkscape:version="1.0.2 (e86c8708, 2021-01-15)" + sodipodi:docname="drawing.svg" + inkscape:export-filename="/Users/lpetre/Desktop/rect846-0.png" + inkscape:export-xdpi="191.53999" + inkscape:export-ydpi="191.53999"> - - - + @@ -63,223 +58,440 @@ - - - - - - - global scope - - - - class scope - - - - function scope - - - - - comprehension scope - - - - ITERATIONS = 10class Cls: class_attribute = 20 def fn(): for i in range(ITERATIONS): ... return [ i for i in range(10) ]Cls().fn() + id="layer1"> + + + + + builtin scope + + + + class range(stop) ... + + + + + + + global scope + + + + ITERATIONS = 10Cls().fn() + + + + + + + class scope + + + + class Cls: class_attribute = 20 + + + + + + + function scope + + + + def fn(): for i in range(ITERATIONS): ... + + + + + + + comprehension scope + + + + return [ i for i in range(10) ] + + diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index f6c9c078..0db0e7f3 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -40,8 +40,8 @@ The wrapper provides a :func:`~libcst.metadata.MetadataWrapper.resolve` function .. autoclass:: libcst.metadata.MetadataWrapper :special-members: __init__ -If you're working with visitors, which extend :class:`~libcst.MetadataDependent`, -metadata dependencies will be automatically computed when visited by a +If you're working with visitors, which extend :class:`~libcst.MetadataDependent`, +metadata dependencies will be automatically computed when visited by a :class:`~libcst.metadata.MetadataWrapper` and are accessible through :func:`~libcst.MetadataDependent.get_metadata` @@ -134,14 +134,15 @@ New scopes are created for classes, functions, and comprehensions. Other block constructs like conditional statements, loops, and try…except don't create their own scope. -There are four different type of scope in Python: +There are five different type of scope in Python: +:class:`~libcst.metadata.BuiltinScope`, :class:`~libcst.metadata.GlobalScope`, :class:`~libcst.metadata.ClassScope`, :class:`~libcst.metadata.FunctionScope`, and :class:`~libcst.metadata.ComprehensionScope`. .. image:: _static/img/python_scopes.png - :alt: Diagram showing how the above 4 scopes are nested in each other + :alt: Diagram showing how the above 5 scopes are nested in each other :width: 400 :align: center @@ -175,6 +176,9 @@ assigned or accessed within. 
:no-undoc-members: :special-members: __contains__, __getitem__, __iter__ +.. autoclass:: libcst.metadata.BuiltinScope + :no-undoc-members: + .. autoclass:: libcst.metadata.GlobalScope :no-undoc-members: diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index 477a631f..01e2514b 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -33,6 +33,7 @@ from libcst.metadata.scope_provider import ( Assignments, BaseAssignment, BuiltinAssignment, + BuiltinScope, ClassScope, ComprehensionScope, FunctionScope, @@ -60,6 +61,7 @@ __all__ = [ "BaseAssignment", "Assignment", "BuiltinAssignment", + "BuiltinScope", "Access", "Scope", "GlobalScope", diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 77ceafd1..919810ea 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -338,13 +338,12 @@ class _NameUtil: name_prefixes.append(scope.name) elif isinstance(scope, FunctionScope): name_prefixes.append(f"{scope.name}.") - elif isinstance(scope, GlobalScope): - break elif isinstance(scope, ComprehensionScope): name_prefixes.append("") - else: + elif not isinstance(scope, (GlobalScope, BuiltinScope)): raise Exception(f"Unexpected Scope: {scope}") - scope = scope.parent + + scope = scope.parent if scope.parent != scope else None parts = [*reversed(name_prefixes)] if remaining_name: @@ -536,27 +535,57 @@ class Scope(abc.ABC): return Accesses(self._accesses) +class BuiltinScope(Scope): + """ + A BuiltinScope represents python builtin declarations. See https://docs.python.org/3/library/builtins.html + """ + + def __init__(self, globals: Scope) -> None: + self.globals: Scope = globals # must be defined before Scope.__init__ is called + super().__init__(parent=self) + + def __contains__(self, name: str) -> bool: + return hasattr(builtins, name) + + def __getitem__(self, name: str) -> Set[BaseAssignment]: + if name in self._assignments: + return self._assignments[name] + if hasattr(builtins, name): + # note - we only see the builtin assignments during the deferred + # access resolution. unfortunately that means we have to create the + # assignment here, which can cause the set to mutate during iteration + self._assignments[name].add(BuiltinAssignment(name, self)) + return self._assignments[name] + return set() + + def record_assignment(self, name: str, node: cst.CSTNode) -> None: + raise NotImplementedError("assignments in builtin scope are not allowed") + + def record_global_overwrite(self, name: str) -> None: + raise NotImplementedError("global overwrite in builtin scope are not allowed") + + def record_nonlocal_overwrite(self, name: str) -> None: + raise NotImplementedError("declarations in builtin scope are not allowed") + + class GlobalScope(Scope): """ A GlobalScope is the scope of module. All module level assignments are recorded in GlobalScope. 
""" def __init__(self) -> None: - self.globals: Scope = self # must be defined before Scope.__init__ is called - super().__init__(parent=self) + super().__init__(parent=BuiltinScope(self)) def __contains__(self, name: str) -> bool: - return hasattr(builtins, name) or ( - name in self._assignments and len(self._assignments[name]) > 0 - ) + if name in self._assignments: + return len(self._assignments[name]) > 0 + return self.parent._contains_in_self_or_parent(name) def __getitem__(self, name: str) -> Set[BaseAssignment]: - if hasattr(builtins, name): - if not any( - isinstance(i, BuiltinAssignment) for i in self._assignments[name] - ): - self._assignments[name].add(BuiltinAssignment(name, self)) - return self._assignments[name] + if name in self._assignments: + return self._assignments[name] + else: + return self.parent._getitem_from_self_or_parent(name) def record_global_overwrite(self, name: str) -> None: pass diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 27a8f495..59a20aec 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -13,6 +13,8 @@ from libcst import ensure_type from libcst.metadata import MetadataWrapper from libcst.metadata.scope_provider import ( Assignment, + BuiltinAssignment, + BuiltinScope, ClassScope, ComprehensionScope, FunctionScope, @@ -144,6 +146,11 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(scope_of_module[builtin]), 1) self.assertEqual(len(scope_of_module["something_not_a_builtin"]), 0) + scope_of_builtin = scope_of_module.parent + self.assertIsInstance(scope_of_builtin, BuiltinScope) + self.assertEqual(len(scope_of_builtin[builtin]), 1) + self.assertEqual(len(scope_of_builtin["something_not_a_builtin"]), 0) + func_body = ensure_type(m.body[0], cst.FunctionDef).body func_pass_statement = func_body.body[0] scope_of_func_statement = scopes[func_pass_statement] @@ -1687,3 +1694,70 @@ class ScopeProviderTest(UnitTest): cast("3rr0r", "") """ ) + + def test_builtin_scope(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + a = pow(1, 2) + def foo(): + b = pow(2, 3) + """ + ) + scope_of_module = scopes[m] + self.assertIsInstance(scope_of_module, GlobalScope) + self.assertEqual(len(scope_of_module["pow"]), 1) + builtin_pow_assignment = list(scope_of_module["pow"])[0] + self.assertIsInstance(builtin_pow_assignment, BuiltinAssignment) + self.assertIsInstance(builtin_pow_assignment.scope, BuiltinScope) + + global_a_assignments = scope_of_module["a"] + self.assertEqual(len(global_a_assignments), 1) + a_assignment = list(global_a_assignments)[0] + self.assertIsInstance(a_assignment, Assignment) + + func_body = ensure_type(m.body[1], cst.FunctionDef).body + func_statement = func_body.body[0] + scope_of_func_statement = scopes[func_statement] + self.assertIsInstance(scope_of_func_statement, FunctionScope) + func_b_assignments = scope_of_func_statement["b"] + self.assertEqual(len(func_b_assignments), 1) + b_assignment = list(func_b_assignments)[0] + self.assertIsInstance(b_assignment, Assignment) + + builtin_pow_accesses = list(builtin_pow_assignment.references) + self.assertEqual(len(builtin_pow_accesses), 2) + + def test_override_builtin_scope(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + def pow(x, y): + return x ** y + + a = pow(1, 2) + def foo(): + b = pow(2, 3) + """ + ) + scope_of_module = scopes[m] + self.assertIsInstance(scope_of_module, GlobalScope) + self.assertEqual(len(scope_of_module["pow"]), 1) + 
global_pow_assignment = list(scope_of_module["pow"])[0] + self.assertIsInstance(global_pow_assignment, Assignment) + self.assertIsInstance(global_pow_assignment.scope, GlobalScope) + + global_a_assignments = scope_of_module["a"] + self.assertEqual(len(global_a_assignments), 1) + a_assignment = list(global_a_assignments)[0] + self.assertIsInstance(a_assignment, Assignment) + + func_body = ensure_type(m.body[2], cst.FunctionDef).body + func_statement = func_body.body[0] + scope_of_func_statement = scopes[func_statement] + self.assertIsInstance(scope_of_func_statement, FunctionScope) + func_b_assignments = scope_of_func_statement["b"] + self.assertEqual(len(func_b_assignments), 1) + b_assignment = list(func_b_assignments)[0] + self.assertIsInstance(b_assignment, Assignment) + + global_pow_accesses = list(global_pow_assignment.references) + self.assertEqual(len(global_pow_accesses), 2) From 1bd1c0890c0a0cdd77423c56a189da0aaebb6137 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 26 Mar 2021 17:15:57 +0000 Subject: [PATCH 107/632] Add FullyQualifiedNameProvider (#465) --- docs/source/metadata.rst | 8 ++ libcst/codemod/_codemod.py | 2 +- libcst/metadata/__init__.py | 6 +- libcst/metadata/full_repo_manager.py | 5 +- libcst/metadata/name_provider.py | 105 ++++++++++++++++- libcst/metadata/scope_provider.py | 27 +++-- libcst/metadata/tests/test_name_provider.py | 121 +++++++++++++++++++- 7 files changed, 259 insertions(+), 15 deletions(-) diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index 0db0e7f3..bdda7a21 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -203,10 +203,18 @@ We don't call it `fully qualified name `_ when timeout. """ diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 5072399b..516db2b4 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -3,12 +3,19 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Collection, Optional, Union +import dataclasses +import re +from pathlib import Path +from typing import Collection, List, Mapping, Optional, Pattern, Union import libcst as cst from libcst._metadata_dependent import MetadataDependent from libcst.metadata.base_provider import BatchableMetadataProvider -from libcst.metadata.scope_provider import QualifiedName, ScopeProvider +from libcst.metadata.scope_provider import ( + QualifiedName, + QualifiedNameSource, + ScopeProvider, +) class QualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): @@ -76,3 +83,97 @@ class QualifiedNameVisitor(cst.CSTVisitor): self.provider.set_metadata(node, set()) super().on_visit(node) return True + + +DOT_PY: Pattern[str] = re.compile(r"(__init__)?\.py$") + + +def _module_name(path: str) -> Optional[str]: + return DOT_PY.sub("", path).replace("/", ".").rstrip(".") + + +class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): + """ + Provide fully qualified names for CST nodes. Like :class:`QualifiedNameProvider`, + but the provided :class:`QualifiedName`s have absolute identifier names instead of + local to the current module. + + This provider is initialized with the current module's fully qualified name, and can + be used with :class:`~libcst.metadata.FullRepoManager`. The module's fully qualified + name itself is stored as a metadata of the :class:`~libcst.Module` node. 
Compared to + :class:`QualifiedNameProvider`, it also resolves relative imports. + + Example usage:: + + >>> mgr = FullRepoManager(".", {"dir/a.py"}, {FullyQualifiedNameProvider}) + >>> wrapper = mgr.get_metadata_wrapper_for_path("dir/a.py") + >>> fqnames = wrapper.resolve(FullyQualifiedNameProvider) + >>> {type(k): v for (k, v) in fqnames.items()} + {: {QualifiedName(name='dir.a', source=)}} + + """ + + METADATA_DEPENDENCIES = (QualifiedNameProvider,) + + @classmethod + def gen_cache( + cls, root_path: Path, paths: List[str], timeout: Optional[int] = None + ) -> Mapping[str, object]: + cache = {path: _module_name(path) for path in paths} + return cache + + def __init__(self, cache: str) -> None: + super().__init__(cache) + self.module_name: str = cache + + def visit_Module(self, node: cst.Module) -> bool: + visitor = FullyQualifiedNameVisitor(self, self.module_name) + node.visit(visitor) + self.set_metadata( + node, + {QualifiedName(name=self.module_name, source=QualifiedNameSource.LOCAL)}, + ) + return True + + +class FullyQualifiedNameVisitor(cst.CSTVisitor): + @staticmethod + def _fully_qualify_local(module_name: str, qname: QualifiedName) -> str: + name = qname.name + if not name.startswith("."): + # not a relative import + return f"{module_name}.{name}" + + # relative import + name = name.lstrip(".") + parts_to_strip = len(qname.name) - len(name) + target_module = ".".join(module_name.split(".")[: -1 * parts_to_strip]) + return f"{target_module}.{name}" + + @staticmethod + def _fully_qualify(module_name: str, qname: QualifiedName) -> QualifiedName: + if qname.source == QualifiedNameSource.BUILTIN: + # builtins are already fully qualified + return qname + name = qname.name + if qname.source == QualifiedNameSource.IMPORT and not name.startswith("."): + # non-relative imports are already fully qualified + return qname + new_name = FullyQualifiedNameVisitor._fully_qualify_local(module_name, qname) + return dataclasses.replace(qname, name=new_name) + + def __init__(self, provider: FullyQualifiedNameProvider, module_name: str) -> None: + self.module_name = module_name + self.provider = provider + + def on_visit(self, node: cst.CSTNode) -> bool: + qnames = self.provider.get_metadata(QualifiedNameProvider, node) + if qnames is not None: + self.provider.set_metadata( + node, + { + FullyQualifiedNameVisitor._fully_qualify(self.module_name, qname) + for qname in qnames + }, + ) + return True diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 919810ea..f0becc76 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -285,17 +285,26 @@ class _NameUtil: return _NameUtil.get_name_for(node.name) return None + @staticmethod + def get_module_name_for_import_alike( + assignment_node: Union[cst.Import, cst.ImportFrom] + ) -> str: + module = "" + if isinstance(assignment_node, cst.ImportFrom): + module_attr = assignment_node.module + relative = assignment_node.relative + if module_attr: + module = get_full_name_for_node(module_attr) or "" + if relative: + module = "." 
* len(relative) + module + return module + @staticmethod def find_qualified_name_for_import_alike( assignment_node: Union[cst.Import, cst.ImportFrom], full_name: str ) -> Set[QualifiedName]: - module = "" + module = _NameUtil.get_module_name_for_import_alike(assignment_node) results = set() - if isinstance(assignment_node, cst.ImportFrom): - module_attr = assignment_node.module - if module_attr: - # TODO: for relative import, keep the relative Dot in the qualified name - module = get_full_name_for_node(module_attr) import_names = assignment_node.names if not isinstance(import_names, cst.ImportStar): for name in import_names: @@ -308,7 +317,11 @@ class _NameUtil: real_names = [".".join(parts[:i]) for i in range(len(parts), 0, -1)] for real_name in real_names: as_name = real_name - if module: + if module and module.endswith("."): + # from . import a + # real_name should be ".a" + real_name = f"{module}{real_name}" + elif module: real_name = f"{module}.{real_name}" if name and name.asname: eval_alias = name.evaluated_alias diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index f995b65a..107a74e7 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -3,17 +3,22 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +from pathlib import Path +from tempfile import TemporaryDirectory from textwrap import dedent from typing import Collection, Mapping, Optional, Set, Tuple import libcst as cst from libcst import ensure_type from libcst.metadata import ( + FullyQualifiedNameProvider, MetadataWrapper, QualifiedName, QualifiedNameProvider, QualifiedNameSource, ) +from libcst.metadata.full_repo_manager import FullRepoManager +from libcst.metadata.name_provider import FullyQualifiedNameVisitor from libcst.testing.utils import UnitTest @@ -25,8 +30,29 @@ def get_qualified_name_metadata_provider( def get_qualified_names(module_str: str) -> Set[QualifiedName]: - _, qnames = get_qualified_name_metadata_provider(module_str) - return set().union(*qnames.values()) + _, qnames_map = get_qualified_name_metadata_provider(module_str) + return {qname for qnames in qnames_map.values() for qname in qnames} + + +def get_fully_qualified_names(file_path: str, module_str: str) -> Set[QualifiedName]: + wrapper = cst.MetadataWrapper( + cst.parse_module(dedent(module_str)), + # pyre-fixme[6]: Incompatible parameter type [6]: Expected + # `typing.Mapping[typing.Type[cst.metadata.base_provider.BaseMetadataProvider[ + # object]], object]` for 2nd parameter `cache` to call + # `cst.metadata.wrapper.MetadataWrapper.__init__` but got + # `typing.Dict[typing.Type[FullyQualifiedNameProvider], object]` + cache={ + FullyQualifiedNameProvider: FullyQualifiedNameProvider.gen_cache( + Path(""), [file_path], None + ).get(file_path, "") + }, + ) + return { + qname + for qnames in wrapper.resolve(FullyQualifiedNameProvider).values() + for qname in qnames + } class QualifiedNameProviderTest(UnitTest): @@ -325,3 +351,94 @@ class QualifiedNameProviderTest(UnitTest): self.assertEqual( names[attribute], {QualifiedName("a.aa.aaa", QualifiedNameSource.IMPORT)} ) + + +class FullyQualifiedNameProviderTest(UnitTest): + def test_builtins(self) -> None: + qnames = get_fully_qualified_names( + "test/module.py", + """ + int(None) + """, + ) + module_name = QualifiedName( + name="test.module", source=QualifiedNameSource.LOCAL + ) + self.assertIn(module_name, qnames) + qnames 
-= {module_name} + self.assertEqual( + {"builtins.int", "builtins.None"}, + {qname.name for qname in qnames}, + ) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.BUILTIN, msg=f"{qname}") + + def test_imports(self) -> None: + qnames = get_fully_qualified_names( + "some/test/module.py", + """ + from a.b import c as d + from . import rel + from .lol import rel2 + from .. import thing as rel3 + d, rel, rel2, rel3 + """, + ) + module_name = QualifiedName( + name="some.test.module", source=QualifiedNameSource.LOCAL + ) + self.assertIn(module_name, qnames) + qnames -= {module_name} + self.assertEqual( + {"a.b.c", "some.test.rel", "some.test.lol.rel2", "some.thing"}, + {qname.name for qname in qnames}, + ) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.IMPORT, msg=f"{qname}") + + def test_locals(self) -> None: + qnames = get_fully_qualified_names( + "some/test/module.py", + """ + class X: + a: X + """, + ) + self.assertEqual( + {"some.test.module", "some.test.module.X", "some.test.module.X.a"}, + {qname.name for qname in qnames}, + ) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.LOCAL, msg=f"{qname}") + + def test_local_qualification(self) -> None: + base_module = "some.test.module" + for (name, expected) in [ + (".foo", "some.test.foo"), + ("..bar", "some.bar"), + ("foo", "some.test.module.foo"), + ]: + with self.subTest(name=name): + self.assertEqual( + FullyQualifiedNameVisitor._fully_qualify_local( + base_module, + QualifiedName(name=name, source=QualifiedNameSource.LOCAL), + ), + expected, + ) + + +class FullyQualifiedNameIntegrationTest(UnitTest): + def test_with_full_repo_manager(self) -> None: + with TemporaryDirectory() as dir: + fname = "pkg/mod.py" + (Path(dir) / "pkg").mkdir() + (Path(dir) / fname).touch() + mgr = FullRepoManager(dir, [fname], [FullyQualifiedNameProvider]) + wrapper = mgr.get_metadata_wrapper_for_path(fname) + fqnames = wrapper.resolve(FullyQualifiedNameProvider) + (mod, names) = next(iter(fqnames.items())) + self.assertIsInstance(mod, cst.Module) + self.assertEqual( + names, {QualifiedName(name="pkg.mod", source=QualifiedNameSource.LOCAL)} + ) From 9a6fd5665333384ad8172cc74ea5db9872f4b157 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Mon, 29 Mar 2021 01:08:05 -0700 Subject: [PATCH 108/632] Improve handling of raw fstrings (#462) (#466) --- libcst/_nodes/tests/test_atom.py | 14 ++++++++++++++ libcst/_parser/parso/python/tokenize.py | 20 ++++++++++++++++---- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index 64561146..da3ea9ef 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -683,6 +683,20 @@ class AtomTest(CSTNodeTest): "parser": parse_expression, "expected_position": None, }, + { + "node": cst.FormattedString( + parts=( + cst.FormattedStringText("\\"), + cst.FormattedStringExpression( + cst.Name(value="a"), + ), + ), + start='fr"', + ), + "code": 'fr"\\{a}"', + "parser": parse_expression, + "expected_position": None, + }, # Validate parens { "node": cst.FormattedString( diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 478bc03a..6b30c6d0 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -146,6 +146,8 @@ def _get_token_collection(version_info: PythonVersionInfo) -> TokenCollection: return result +fstring_raw_string = _compile(r"(?:[^{}]+|\{\{|\}\})+") + 
unicode_character_name = r"[A-Za-z0-9\-]+(?: [A-Za-z0-9\-]+)*" fstring_string_single_line = _compile( r"(?:\{\{|\}\}|\\N\{" @@ -155,6 +157,7 @@ fstring_string_single_line = _compile( fstring_string_multi_line = _compile( r"(?:\{\{|\}\}|\\N\{" + unicode_character_name + r"\}|\\[^N]|[^{}\\])+" ) + fstring_format_spec_single_line = _compile(r"(?:\\(?:\r\n?|\n)|[^{}\r\n])+") fstring_format_spec_multi_line = _compile(r"[^{}]+") @@ -334,8 +337,9 @@ class PythonToken(Token): class FStringNode: - def __init__(self, quote): + def __init__(self, quote, raw): self.quote = quote + self.raw = raw self.parentheses_count = 0 self.previous_lines = "" self.last_string_start_pos = None @@ -384,7 +388,9 @@ def _find_fstring_string(endpats, fstring_stack, line, lnum, pos): else: regex = fstring_format_spec_single_line else: - if allow_multiline: + if tos.raw: + regex = fstring_raw_string + elif allow_multiline: regex = fstring_string_multi_line else: regex = fstring_string_single_line @@ -760,7 +766,10 @@ def _tokenize_lines_py36_or_below( # noqa: C901 token in token_collection.fstring_pattern_map ): # The start of an fstring. fstring_stack.append( - FStringNode(token_collection.fstring_pattern_map[token]) + FStringNode( + token_collection.fstring_pattern_map[token], + "r" in token or "R" in token, + ) ) if stashed is not None: yield stashed @@ -1051,7 +1060,10 @@ def _tokenize_lines_py37_or_above( # noqa: C901 token in token_collection.fstring_pattern_map ): # The start of an fstring. fstring_stack.append( - FStringNode(token_collection.fstring_pattern_map[token]) + FStringNode( + token_collection.fstring_pattern_map[token], + "r" in token or "R" in token, + ) ) yield PythonToken(FSTRING_START, token, spos, prefix) elif initial == "\\" and line[start:] in ( From 27ef2bf10ee71d61e658a94f97dcae1d7d94bf39 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 29 Mar 2021 17:49:29 +0100 Subject: [PATCH 109/632] Bump version to 0.3.18 (#470) --- CHANGELOG.md | 12 ++++++++++++ libcst/_version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 716ce121..365dbbd1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +# 0.3.18 - 2021-03-29 + +## Added +- Add FlattenSentinel to support replacing a statement with multiple statements [#455](https://github.com/Instagram/LibCST/pull/455) +- Add BuiltinScope [#469](https://github.com/Instagram/LibCST/pull/469) +- Add FullyQualifiedNameProvider [#465](https://github.com/Instagram/LibCST/pull/465) + +## Updated +- Split QualifiedNameProvider out from libcst.metadata.scope_provider [#464](https://github.com/Instagram/LibCST/pull/464) + +## Fixed +- Exception while parsing escape character in raw f-strings [#462](https://github.com/Instagram/LibCST/issues/462) # 0.3.17 - 2021-02-08 ## Updated diff --git a/libcst/_version.py b/libcst/_version.py index b6558851..4f7f0491 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.17" +LIBCST_VERSION: str = "0.3.18" From e759ca8290585cf3badcd22ac5e1fdbc5e4f9e13 Mon Sep 17 00:00:00 2001 From: Lisa Roach Date: Fri, 2 Apr 2021 12:08:54 -0700 Subject: [PATCH 110/632] Fix leaking processes from TypeInferenceProvider (#474) * Switches to subprocess.run. * Updates cmd to avoid string escaping. 
--- libcst/metadata/type_inference_provider.py | 16 ++++++++-------- libcst/tests/test_pyre_integration.py | 6 ++++-- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 7cb7da28..9975d023 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -57,10 +57,11 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): root_path: Path, paths: List[str], timeout: Optional[int] ) -> Mapping[str, object]: params = ",".join(f"path='{root_path / path}'" for path in paths) - cmd = f'''pyre --noninteractive query "types({params})"''' + cmd_args = ["pyre", "--noninteractive", "query", f'"types({params})"'] try: - stdout, stderr, return_code = run_command(cmd, timeout=timeout) + stdout, stderr, return_code = run_command(cmd_args, timeout=timeout) except subprocess.TimeoutExpired as exc: + raise exc if return_code != 0: @@ -101,12 +102,11 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): self._parse_metadata(node) -def run_command(command: str, timeout: Optional[int] = None) -> Tuple[str, str, int]: - process = subprocess.Popen( - command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True - ) - stdout, stderr = process.communicate(timeout=timeout) - return stdout.decode(), stderr.decode(), process.returncode +def run_command( + cmd_args: List[str], timeout: Optional[int] = None +) -> Tuple[str, str, int]: + process = subprocess.run(cmd_args, capture_output=True, timeout=timeout) + return process.stdout.decode(), process.stderr.decode(), process.returncode class RawPyreData(TypedDict): diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 11fd7f8d..6192dcff 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -123,13 +123,15 @@ if __name__ == "__main__": stdout: str stderr: str return_code: int - stdout, stderr, return_code = run_command("pyre start") + stdout, stderr, return_code = run_command(["pyre", "start"]) if return_code != 0: print(stdout) print(stderr) for path in TEST_SUITE_PATH.glob("*.py"): - cmd = f'''pyre query "types(path='{path}')"''' + # Pull params into it's own arg to avoid the string escaping in subprocess + params = f"path='{path}'" + cmd = ["pyre", "query", f"types({params})"] print(cmd) stdout, stderr, return_code = run_command(cmd) if return_code != 0: From 068b90595f9cae42c88e4c6ef3f0167477078cef Mon Sep 17 00:00:00 2001 From: Lisa Roach Date: Thu, 8 Apr 2021 00:33:32 -0700 Subject: [PATCH 111/632] Fixes TypeInferenceProvider breakage with empty cache. (#476) * Fixes TypeInferenceProvider breakage with empty cache. 
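The essence of the fix below: the `types` key on the Pyre cache becomes optional (`TypedDict` with `total=False`) and is read with `.get`, so an empty cache no longer raises `KeyError`. A stripped-down sketch of that pattern — `build_lookup` and the simplified value type are illustrative stand-ins, not the provider's real API:

```
from typing import Any, Dict, Sequence

from typing_extensions import TypedDict  # or typing.TypedDict on Python 3.8+


class PyreData(TypedDict, total=False):
    # total=False: the "types" key may be absent, e.g. when the cache is {}.
    types: Sequence[Dict[str, Any]]


def build_lookup(cache: PyreData) -> Dict[str, str]:
    lookup: Dict[str, str] = {}
    # .get() tolerates a missing "types" key instead of raising KeyError.
    for item in cache.get("types", []):
        lookup[str(item["location"])] = str(item["annotation"])
    return lookup


print(build_lookup({}))          # -> {}
print(build_lookup(PyreData()))  # -> {}
```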
--- libcst/metadata/tests/test_type_inference_provider.py | 7 +++++++ libcst/metadata/type_inference_provider.py | 5 +++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index f6c97751..e7cad72a 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -66,3 +66,10 @@ class TypeInferenceProviderTest(UnitTest): cache={TypeInferenceProvider: data}, ) _test_simple_class_helper(self, wrapper) + + def test_with_empty_cache(self) -> None: + tip = TypeInferenceProvider({}) + self.assertEqual(tip.lookup, {}) + + tip = TypeInferenceProvider(PyreData()) + self.assertEqual(tip.lookup, {}) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 9975d023..4924738e 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -32,7 +32,7 @@ class InferredType(TypedDict): annotation: str -class PyreData(TypedDict): +class PyreData(TypedDict, total=False): types: Sequence[InferredType] @@ -75,7 +75,8 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): def __init__(self, cache: PyreData) -> None: super().__init__(cache) lookup: Dict[CodeRange, str] = {} - for item in cache["types"]: + cache_types = cache.get("types", []) + for item in cache_types: location = item["location"] start = location["start"] end = location["stop"] From 3b21fa9da9f8f16f11b29db4fcdefa82c877ddc8 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 19 Apr 2021 14:41:12 +0100 Subject: [PATCH 112/632] Return more specific QNames for assignments (#477) When `scope.get_qualified_names_for()` is called with a node that's an `Assignment`, return the qualified name for that node instead of all the assignments for the same name. --- libcst/metadata/scope_provider.py | 32 +++++++++- libcst/metadata/tests/test_name_provider.py | 67 +++++++++++++++++++++ 2 files changed, 97 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index f0becc76..21e7a9e5 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -522,13 +522,17 @@ class Scope(abc.ABC): if isinstance(assignment, Assignment): assignment_node = assignment.node if isinstance(assignment_node, (cst.Import, cst.ImportFrom)): - results |= _NameUtil.find_qualified_name_for_import_alike( + names = _NameUtil.find_qualified_name_for_import_alike( assignment_node, full_name ) else: - results |= _NameUtil.find_qualified_name_for_non_import( + names = _NameUtil.find_qualified_name_for_non_import( assignment, full_name ) + if not isinstance(node, str) and _is_assignment(node, assignment_node): + return names + else: + results |= names elif isinstance(assignment, BuiltinAssignment): results.add( QualifiedName( @@ -747,6 +751,30 @@ def _gen_dotted_names( yield from name_values +def _is_assignment(node: cst.CSTNode, assignment_node: cst.CSTNode) -> bool: + """ + Returns true if ``node`` is part of the assignment at ``assignment_node``. + + Normally this is just a simple identity check, except for imports where the + assignment is attached to the entire import statement but we are interested in + ``Name`` nodes inside the statement. 
+ """ + if node is assignment_node: + return True + if isinstance(assignment_node, (cst.Import, cst.ImportFrom)): + aliases = assignment_node.names + if isinstance(aliases, cst.ImportStar): + return False + for alias in aliases: + if alias.name is node: + return True + asname = alias.asname + if asname is not None: + if asname.name is node: + return True + return False + + class ScopeVisitor(cst.CSTVisitor): # since it's probably not useful. That can makes this visitor cleaner. def __init__(self, provider: "ScopeProvider") -> None: diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index 107a74e7..94b40c21 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -352,6 +352,73 @@ class QualifiedNameProviderTest(UnitTest): names[attribute], {QualifiedName("a.aa.aaa", QualifiedNameSource.IMPORT)} ) + def test_multiple_qualified_names(self) -> None: + m, names = get_qualified_name_metadata_provider( + """ + if False: + def f(): pass + elif False: + from b import f + else: + import f + import a.b as f + + f() + """ + ) + if_ = ensure_type(m.body[0], cst.If) + first_f = ensure_type(if_.body.body[0], cst.FunctionDef) + second_f_alias = ensure_type( + ensure_type( + ensure_type(if_.orelse, cst.If).body.body[0], + cst.SimpleStatementLine, + ).body[0], + cst.ImportFrom, + ).names + self.assertFalse(isinstance(second_f_alias, cst.ImportStar)) + second_f = second_f_alias[0].name + third_f_alias = ensure_type( + ensure_type( + ensure_type(ensure_type(if_.orelse, cst.If).orelse, cst.Else).body.body[ + 0 + ], + cst.SimpleStatementLine, + ).body[0], + cst.Import, + ).names + self.assertFalse(isinstance(third_f_alias, cst.ImportStar)) + third_f = third_f_alias[0].name + fourth_f = ensure_type( + ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Import + ) + .names[0] + .asname, + cst.AsName, + ).name + call = ensure_type( + ensure_type( + ensure_type(m.body[2], cst.SimpleStatementLine).body[0], cst.Expr + ).value, + cst.Call, + ) + + self.assertEqual( + names[first_f], {QualifiedName("f", QualifiedNameSource.LOCAL)} + ) + self.assertEqual(names[second_f], set()) + self.assertEqual(names[third_f], set()) + self.assertEqual(names[fourth_f], set()) + self.assertEqual( + names[call], + { + QualifiedName("f", QualifiedNameSource.IMPORT), + QualifiedName("b.f", QualifiedNameSource.IMPORT), + QualifiedName("f", QualifiedNameSource.LOCAL), + QualifiedName("a.b", QualifiedNameSource.IMPORT), + }, + ) + class FullyQualifiedNameProviderTest(UnitTest): def test_builtins(self) -> None: From d1606b7077eb4acc0cfcd90b5160405510e51f65 Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Wed, 28 Apr 2021 10:19:54 +0100 Subject: [PATCH 113/632] Fix formatting for link to QualifiedName class in docs (#480) --- libcst/metadata/name_provider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 516db2b4..e26a3eb5 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -95,8 +95,8 @@ def _module_name(path: str) -> Optional[str]: class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): """ Provide fully qualified names for CST nodes. Like :class:`QualifiedNameProvider`, - but the provided :class:`QualifiedName`s have absolute identifier names instead of - local to the current module. 
+ but the provided :class:`QualifiedName` instances have absolute identifier names + instead of local to the current module. This provider is initialized with the current module's fully qualified name, and can be used with :class:`~libcst.metadata.FullRepoManager`. The module's fully qualified From 4d2ccc54b2b68a9059bc0670a472eded1cb767f9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 12 May 2021 14:50:15 +0100 Subject: [PATCH 114/632] tie accesses from string annotation to the string node (#483) --- libcst/metadata/scope_provider.py | 40 +++++++++-- libcst/metadata/tests/test_scope_provider.py | 72 ++++++++++++++++++++ 2 files changed, 105 insertions(+), 7 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 21e7a9e5..3b5d380e 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -74,9 +74,10 @@ class Access: #: The node of the access. A name is an access when the expression context is #: :attr:`ExpressionContext.LOAD`. This is usually the name node representing the - #: access, except for dotted imports, when it might be the attribute that - #: represents the most specific part of the imported symbol. - node: Union[cst.Name, cst.Attribute] + #: access, except for: 1) dotted imports, when it might be the attribute that + #: represents the most specific part of the imported symbol; and 2) string + #: annotations, when it is the entire string literal + node: Union[cst.Name, cst.Attribute, cst.BaseString] #: The scope of the access. Note that a access could be in a child scope of its #: assignment. @@ -422,7 +423,7 @@ class Scope(abc.ABC): @abc.abstractmethod def __contains__(self, name: str) -> bool: - """ Check if the name str exist in current scope by ``name in scope``. """ + """Check if the name str exist in current scope by ``name in scope``.""" ... @abc.abstractmethod @@ -775,18 +776,26 @@ def _is_assignment(node: cst.CSTNode, assignment_node: cst.CSTNode) -> bool: return False +@dataclass(frozen=True) +class DeferredAccess: + access: Access + enclosing_attribute: Optional[cst.Attribute] + enclosing_string_annotation: Optional[cst.BaseString] + + class ScopeVisitor(cst.CSTVisitor): # since it's probably not useful. That can makes this visitor cleaner. 
def __init__(self, provider: "ScopeProvider") -> None: self.provider: ScopeProvider = provider self.scope: Scope = GlobalScope() - self.__deferred_accesses: List[Tuple[Access, Optional[cst.Attribute]]] = [] + self.__deferred_accesses: List[DeferredAccess] = [] self.__top_level_attribute_stack: List[Optional[cst.Attribute]] = [None] self.__in_annotation: Set[ Union[cst.Call, cst.Annotation, cst.Subscript] ] = set() self.__in_type_hint: Set[Union[cst.Call, cst.Annotation, cst.Subscript]] = set() self.__in_ignored_subscript: Set[cst.Subscript] = set() + self.__last_string_annotation: Optional[cst.BaseString] = None self.__ignore_annotation: int = 0 @contextmanager @@ -887,8 +896,13 @@ class ScopeVisitor(cst.CSTVisitor): ) and not self.__in_ignored_subscript: value = node.evaluated_value if value: + top_level_annotation = self.__last_string_annotation is None + if top_level_annotation: + self.__last_string_annotation = node mod = cst.parse_module(value) mod.visit(self) + if top_level_annotation: + self.__last_string_annotation = None return True return False @@ -920,7 +934,11 @@ class ScopeVisitor(cst.CSTVisitor): is_type_hint=bool(self.__in_type_hint), ) self.__deferred_accesses.append( - (access, self.__top_level_attribute_stack[-1]) + DeferredAccess( + access=access, + enclosing_attribute=self.__top_level_attribute_stack[-1], + enclosing_string_annotation=self.__last_string_annotation, + ) ) def visit_FunctionDef(self, node: cst.FunctionDef) -> Optional[bool]: @@ -1074,7 +1092,12 @@ class ScopeVisitor(cst.CSTVisitor): # In worst case, all accesses (m) and assignments (n) refer to the same name, # the time complexity is O(m x n), this optimizes it as O(m + n). scope_name_accesses = defaultdict(set) - for (access, enclosing_attribute) in self.__deferred_accesses: + for def_access in self.__deferred_accesses: + access, enclosing_attribute, enclosing_string_annotation = ( + def_access.access, + def_access.enclosing_attribute, + def_access.enclosing_string_annotation, + ) name = ensure_type(access.node, cst.Name).value if enclosing_attribute is not None: # if _gen_dotted_names doesn't generate any values, fall back to @@ -1085,6 +1108,9 @@ class ScopeVisitor(cst.CSTVisitor): name = attr_name break + if enclosing_string_annotation is not None: + access.node = enclosing_string_annotation + scope_name_accesses[(access.scope, name)].add(access) access.record_assignments(name) access.scope.record_access(name, access) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 59a20aec..8a1bf4b3 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1082,6 +1082,10 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(assignment.references), 1) references = list(assignment.references) self.assertTrue(references[0].is_annotation) + reference_node = references[0].node + self.assertIsInstance(reference_node, cst.SimpleString) + if isinstance(reference_node, cst.SimpleString): + self.assertEqual(reference_node.evaluated_value, "B") assignment = list(scope["C"])[0] self.assertIsInstance(assignment, Assignment) @@ -1104,6 +1108,10 @@ class ScopeProviderTest(UnitTest): references = list(assignment.references) self.assertFalse(references[0].is_annotation) self.assertTrue(references[0].is_type_hint) + reference_node = references[0].node + self.assertIsInstance(reference_node, cst.SimpleString) + if isinstance(reference_node, cst.SimpleString): + self.assertEqual(reference_node.evaluated_value, "E") 
assignment = list(scope["E2"])[0] self.assertIsInstance(assignment, Assignment) @@ -1119,6 +1127,10 @@ class ScopeProviderTest(UnitTest): references = list(assignment.references) self.assertFalse(references[0].is_annotation) self.assertTrue(references[0].is_type_hint) + reference_node = references[0].node + self.assertIsInstance(reference_node, cst.SimpleString) + if isinstance(reference_node, cst.SimpleString): + self.assertEqual(reference_node.evaluated_value, "Optional[G]") assignment = list(scope["G2"])[0] self.assertIsInstance(assignment, Assignment) @@ -1130,6 +1142,10 @@ class ScopeProviderTest(UnitTest): references = list(assignment.references) self.assertFalse(references[0].is_annotation) self.assertTrue(references[0].is_type_hint) + reference_node = references[0].node + self.assertIsInstance(reference_node, cst.SimpleString) + if isinstance(reference_node, cst.SimpleString): + self.assertEqual(reference_node.evaluated_value, "H") assignment = list(scope["I"])[0] self.assertIsInstance(assignment, Assignment) @@ -1148,6 +1164,10 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(assignment.references), 1) references = list(assignment.references) self.assertFalse(references[0].is_annotation) + reference_node = references[0].node + self.assertIsInstance(reference_node, cst.SimpleString) + if isinstance(reference_node, cst.SimpleString): + self.assertEqual(reference_node.evaluated_value, "K") assignment = list(scope["K2"])[0] self.assertIsInstance(assignment, Assignment) @@ -1157,12 +1177,64 @@ class ScopeProviderTest(UnitTest): self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) + reference_node = references[0].node + self.assertIsInstance(reference_node, cst.SimpleString) + if isinstance(reference_node, cst.SimpleString): + self.assertEqual(reference_node.evaluated_value, "L") assignment = list(scope["M"])[0] self.assertIsInstance(assignment, Assignment) self.assertEqual(len(assignment.references), 1) references = list(assignment.references) + def test_insane_annotation_access(self) -> None: + m, scopes = get_scope_metadata_provider( + r""" + from typing import TypeVar + from a import G + TypeVar("G2", bound="Optional[\"G\"]") + """ + ) + imp = ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.ImportFrom + ) + call = ensure_type( + ensure_type( + ensure_type(m.body[2], cst.SimpleStatementLine).body[0], cst.Expr + ).value, + cst.Call, + ) + bound = call.args[1].value + scope = scopes[imp] + assignment = next(iter(scope["G"])) + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + self.assertEqual(list(assignment.references)[0].node, bound) + + def test_dotted_annotation_access(self) -> None: + m, scopes = get_scope_metadata_provider( + r""" + from typing import TypeVar + import a.G + TypeVar("G2", bound="a.G") + """ + ) + imp = ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Import + ) + call = ensure_type( + ensure_type( + ensure_type(m.body[2], cst.SimpleStatementLine).body[0], cst.Expr + ).value, + cst.Call, + ) + bound = call.args[1].value + scope = scopes[imp] + assignment = next(iter(scope["a.G"])) + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + self.assertEqual(list(assignment.references)[0].node, bound) + def test_node_of_scopes(self) -> None: m, scopes = get_scope_metadata_provider( """ From e0dd6016a54dc2bda8d6df49e10396637b943f06 Mon 
Sep 17 00:00:00 2001
From: Zsolt Dollenstein
Date: Thu, 13 May 2021 15:38:04 +0100
Subject: [PATCH 115/632] bump version to 0.3.19 (#484)

* bump version to 0.3.19

* Update CHANGELOG.md

Co-authored-by: Jimmy Lai
Co-authored-by: Jimmy Lai
---
 CHANGELOG.md       | 10 ++++++++++
 libcst/_version.py |  2 +-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 365dbbd1..fe89ab1f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,13 @@
+# 0.3.19 - 2021-05-12
+
+# Updated
+- Return more specific QNames for assignments [#477](https://github.com/Instagram/LibCST/pull/477)
+- Tie accesses from string annotation to the string node [#483](https://github.com/Instagram/LibCST/pull/483)
+## Fixed
+- Fix leaking processes from TypeInferenceProvider [#474](https://github.com/Instagram/LibCST/pull/474)
+- Fix TypeInferenceProvider breakage with empty cache [#476](https://github.com/Instagram/LibCST/pull/476)
+- Fix formatting for link to QualifiedName class in docs [#480](https://github.com/Instagram/LibCST/pull/480)
+
 # 0.3.18 - 2021-03-29
 
 ## Added
diff --git a/libcst/_version.py b/libcst/_version.py
index 4f7f0491..70304ffb 100644
--- a/libcst/_version.py
+++ b/libcst/_version.py
@@ -4,4 +4,4 @@
 # LICENSE file in the root directory of this source tree.
 
 
-LIBCST_VERSION: str = "0.3.18"
+LIBCST_VERSION: str = "0.3.19"

From 8c19f1ca8333f1721ef7a58a29644c79694402c2 Mon Sep 17 00:00:00 2001
From: Jos Verlinde
Date: Mon, 5 Jul 2021 13:27:41 +0200
Subject: [PATCH 116/632] fix codemodding on windows (#495)

* pass empty environment

fixes: https://github.com/Instagram/LibCST/issues/494

* remove empty env to support windows.
---
 libcst/codemod/_cli.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py
index 97fe8a09..4ecbfb25 100644
--- a/libcst/codemod/_cli.py
+++ b/libcst/codemod/_cli.py
@@ -55,7 +55,6 @@ def invoke_formatter(formatter_args: Sequence[str], code: AnyStr) -> AnyStr:
         AnyStr,
         subprocess.check_output(
             formatter_args,
-            env={},
             input=code,
             universal_newlines=not work_with_bytes,
             encoding=None if work_with_bytes else "utf-8",

From 19d9a0761f5d76c0a049c1b9a4be6f31d1d9dab9 Mon Sep 17 00:00:00 2001
From: Steven Troxler
Date: Wed, 28 Jul 2021 01:52:59 -0700
Subject: [PATCH 117/632] Verify TypeAnnotationsVisitor partial-stub behavior, fix bug (#499)

**What?**

I added tests of how TypeAnnotationsVisitor reacts to
- stubs that ignore decorators and the async keyword
- stubs that lack parameter and/or return annotations where a callable has them.

I discovered one case where TypeAnnotationsVisitor didn't do what I want it to
do: if we're missing a return annotation in the stubs but have one in our
function and `overwrite_existing_annotations` is set, we'll actually strip out
the existing annotation. So I tweaked the logic to only use the annotation from
the stub if it is nonmissing.

**Why?**

I'm working on a project (pyre infer) that depends on TypeAnnotationsVisitor,
and a problem we've run into is that we don't yet have perfect logic for
resolving imports in a way that's sufficiently sensitive to local context to be
good for codemods - for example, we're not yet sure how to reliably diff
against the existing imports for aliased imports (when bar imports foo.Foo and
then baz includes `from bar import Foo`) and relative imports.
Because we try to give full type annotations on parameters, even where there is a preexisting annotation, this can lead to a lot of noise where diffs fail because of imports changing due to the stub even though there was no actual change to the annotations. In the short term, the easiest solution is to simply omit given annotations from the stubs we generate. But that means we have to be sure TypeAnnotations **Test Plan** ``` > python -m unittest libcst.codemod.visitors.tests.test_apply_type_annotations ........................................... ---------------------------------------------------------------------- Ran 43 tests in 1.548s OK ``` --- .../visitors/_apply_type_annotations.py | 5 +- .../tests/test_apply_type_annotations.py | 81 +++++++++++++++++++ 2 files changed, 85 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 393a560b..9b170de1 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -466,7 +466,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): function_annotation = self.annotations.function_annotations[key] # Only add new annotation if explicitly told to overwrite existing # annotations or if one doesn't already exist. - if self.overwrite_existing_annotations or not updated_node.returns: + set_return_annotation = not updated_node.returns or ( + self.overwrite_existing_annotations and function_annotation.returns + ) + if set_return_annotation: updated_node = updated_node.with_changes( returns=function_annotation.returns ) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index c37fbc69..d41ae8f5 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -765,3 +765,84 @@ class TestApplyAnnotationsVisitor(CodemodTest): overwrite_existing_annotations=True, ) self.assertCodemod(before, after, context_override=context) + + @data_provider( + ( + ( + """ + def fully_annotated_with_untyped_stub(a, b): ... + """, + """ + def fully_annotated_with_untyped_stub(a: bool, b: bool) -> str: + return "hello" + """, + """ + def fully_annotated_with_untyped_stub(a: bool, b: bool) -> str: + return "hello" + """, + ), + ( + """ + def params_annotated_with_return_from_stub(a, b) -> str: ... + """, + """ + def params_annotated_with_return_from_stub(a: bool, b: bool): + return "hello" + """, + """ + def params_annotated_with_return_from_stub(a: bool, b: bool) -> str: + return "hello" + """, + ), + ( + """ + def partially_annotated_params_with_partial_stub(a, b: int): ... + """, + """ + def partially_annotated_params_with_partial_stub(a: bool, b) -> str: + return "hello" + """, + """ + def partially_annotated_params_with_partial_stub(a: bool, b: int) -> str: + return "hello" + """, + ), + ( + """ + def async_with_decorators(a: bool, b: bool) -> str: ... 
+ """, + """ + @second_decorator + @first_decorator(5) + async def async_with_decorators(a, b): + return "hello" + """, + """ + @second_decorator + @first_decorator(5) + async def async_with_decorators(a: bool, b: bool) -> str: + return "hello" + """, + ), + ) + ) + def test_annotate_using_incomplete_stubs( + self, stub: str, before: str, after: str + ) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + # Test setting the overwrite flag on the codemod instance. + self.assertCodemod( + before, after, context_override=context, overwrite_existing_annotations=True + ) + + # Test setting the flag when storing the stub in the context. + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, + parse_module(textwrap.dedent(stub.rstrip())), + overwrite_existing_annotations=True, + ) + self.assertCodemod(before, after, context_override=context) From c7f2f577acd1a984019a0c8dd57dd8ecc618d5b5 Mon Sep 17 00:00:00 2001 From: Zach Hammer Date: Wed, 28 Jul 2021 04:53:50 -0400 Subject: [PATCH 118/632] Docs: fix typo (#492) Caught this while quoting your docs in a blog post, which I'll share once published :) --- docs/source/matchers.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/matchers.rst b/docs/source/matchers.rst index eac6faa9..ae67516a 100644 --- a/docs/source/matchers.rst +++ b/docs/source/matchers.rst @@ -5,7 +5,7 @@ Matchers ======== Matchers are provided as a way of asking whether a particular LibCST node and its -children match the a particular shape. It is possible to write a visitor that +children match a particular shape. It is possible to write a visitor that tracks attributes using ``visit_`` methods. It is also possible to implement manual instance checking and traversal of a node's children. However, both are cumbersome to write and hard to understand. Matchers offer a more concise way of From 3009ec9e36e5bdca7f284bcac3d4fcc810b4a922 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 6 Aug 2021 08:55:47 -0700 Subject: [PATCH 119/632] Add tests verifying TypeAnnotationsVisitor current behaviors (#502) * Add tests to verify that LibCST handles string annotations. This is an important property for certain use cases, so it makes sense to verify it in tests so that we can safely depend on it. At present, the reason we want to be able to rely on this is: - at the moment, imports added by infer can make pysa traces hard to understand, because the line numbers are off - if we add the ability to use fully-qualified string annotations for the stubs from infer, then we can do so without adding any import lines and pyre will understand the types. * ApplyTypeAnnotations: add unit test of how import statments are merged Add a unit test illustrating how the codemod handles various cases of import statments in the stub file. Explicitly call out each of the unsupported patterns: - bare imports (we probably should support this) - relative imports (we probably should support this) star imports (we probably don't want to support this) * Add .python-version to .gitignore This will be helpful for anyone using pyenv (I accidentally committed my python version file in a draft branch). 
--- .gitignore | 1 + .../tests/test_apply_type_annotations.py | 111 +++++++++++++++--- 2 files changed, 96 insertions(+), 16 deletions(-) diff --git a/.gitignore b/.gitignore index 85fb5573..2c52df24 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ build/ .coverage .hypothesis/ .pyre_configuration +.python-version diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index d41ae8f5..16ccda99 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -18,6 +18,76 @@ from libcst.testing.utils import data_provider class TestApplyAnnotationsVisitor(CodemodTest): TRANSFORM: Type[Codemod] = ApplyTypeAnnotationsVisitor + @data_provider( + ( + ( + """ + from __future__ import annotations + from foo import Foo + from baz import Baz + """, + """ + from foo import Bar + import bar + """, + """ + from __future__ import annotations + from foo import Foo, Bar + import bar + from baz import Baz + """, + ), + ( + # Missing feature: ignore aliased imports + """ + from Foo import foo as bar + """, + """ + from Foo import bar + """, + """ + from Foo import bar + """, + ), + ( + # Missing feature: ignore bare imports + """ + import foo + """, + """ + """, + """ + """, + ), + ( + # Missing feature: ignore relative imports + """ + from .. import foo + """, + """ + """, + """ + """, + ), + ( + # Missing feature: ignore star imports + """ + from foo import * + """, + """ + """, + """ + """, + ), + ) + ) + def test_merge_module_imports(self, stub: str, before: str, after: str) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + self.assertCodemod(before, after, context_override=context) + @data_provider( ( ( @@ -608,22 +678,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): pass """, ), - # Sanity check that we don't fail when the stub has relative imports. - # We don't do anything with those imports, though. - ( - """ - from .. import hello - def foo() -> typing.Sequence[int]: ... - """, - """ - def foo(): - return [] - """, - """ - def foo() -> typing.Sequence[int]: - return [] - """, - ), ( """ from typing import Dict @@ -669,6 +723,31 @@ class TestApplyAnnotationsVisitor(CodemodTest): def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... """, ), + # Make sure we handle string annotations well + ( + """ + def f(x: "typing.Union[int, str]") -> "typing.Union[int, str]": ... + + class A: + def f(self: "A") -> "A": ... 
+ """, + """ + def f(x): + return x + + class A: + def f(self): + return self + """, + """ + def f(x: "typing.Union[int, str]") -> "typing.Union[int, str]": + return x + + class A: + def f(self: "A") -> "A": + return self + """, + ), ) ) def test_annotate_functions(self, stub: str, before: str, after: str) -> None: From a1282f27f6b9cd0625b013a009bf6e3b5de68561 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 10 Aug 2021 10:37:18 +0100 Subject: [PATCH 120/632] bump version to 0.3.20 (#504) --- CHANGELOG.md | 7 +++++++ libcst/_version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fe89ab1f..73f79ea1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +# 0.3.20 - 2021-08-09 + +## Fixed +- Don't reset subprocess environment to fix codemodding on windows [#495](https://github.com/Instagram/LibCST/pull/495) +- TypeAnnotationsVisitor: don't truncate function return type [#499](https://github.com/Instagram/LibCST/pull/499) +- Docs: Fix typo [#492](https://github.com/Instagram/LibCST/pull/492) + # 0.3.19 - 2021-05-12 # Updated diff --git a/libcst/_version.py b/libcst/_version.py index 70304ffb..960ab01d 100644 --- a/libcst/_version.py +++ b/libcst/_version.py @@ -4,4 +4,4 @@ # LICENSE file in the root directory of this source tree. -LIBCST_VERSION: str = "0.3.19" +LIBCST_VERSION: str = "0.3.20" From 1c3a27bbd521f67896f5318e4dfc80949d86594e Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 10 Aug 2021 10:01:16 -0400 Subject: [PATCH 121/632] Add Github Actions and derive version from git tags (#471) * Use setuptools-scm to derive the current version from git metadata * Add Github Action equivalent to the current circleci tasks * Run pyre integration test in GH action / tox --- .github/workflows/.pyre_configuration | 9 +++ .github/workflows/build.yml | 88 +++++++++++++++++++++++++++ .gitignore | 2 + libcst/__init__.py | 7 ++- libcst/_version.py | 7 --- libcst/tool.py | 2 +- requirements-dev.txt | 1 + setup.py | 27 +++----- tox.ini | 47 +++++++------- 9 files changed, 138 insertions(+), 52 deletions(-) create mode 100644 .github/workflows/.pyre_configuration create mode 100644 .github/workflows/build.yml delete mode 100644 libcst/_version.py diff --git a/.github/workflows/.pyre_configuration b/.github/workflows/.pyre_configuration new file mode 100644 index 00000000..46a87b63 --- /dev/null +++ b/.github/workflows/.pyre_configuration @@ -0,0 +1,9 @@ +{ + "source_directories": [ + "." 
+ ], + "search_path": [ + "stubs", "~/cache/tox/pyre/lib/python3.8/site-packages/" + ], + "strict": true +} diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..f0f703f7 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,88 @@ +name: Python CI + +on: [push, pull_request] + +env: + PIP_CACHE_DIR: ~/cache/pip + +jobs: + tox: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + python-version: [3.6, 3.7, 3.8] + environment: [test] + include: + - os: ubuntu-latest + python-version: 3.8 + environment: lint + - os: ubuntu-latest + python-version: 3.8 + environment: docs + - os: ubuntu-latest + python-version: 3.8 + environment: coverage + - os: ubuntu-latest + python-version: 3.8 + environment: pyre + steps: + - uses: actions/checkout@v1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: pip cache + uses: actions/cache@v2 + id: cache + with: + path: '~/cache' + key: pip-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.environment }}-${{ hashFiles('tox.ini', 'requirements.txt', 'requirements-dev.txt', 'setup.py') }} + restore-keys: | + pip-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.environment }}- + pip-${{ matrix.os }}-${{ matrix.python-version }}- + pip-${{ matrix.os }}- + - name: install tox + run: | + python -m pip install --upgrade pip + pip install tox tox-gh-actions -r requirements.txt -r requirements-dev.txt + - name: run tox + run: tox --workdir ~/cache/tox -e ${{ matrix.environment }} + - name: Archive Docs + if: matrix.environment == 'docs' + uses: actions/upload-artifact@v2 + with: + name: sphinx-docs + path: docs/build + - name: Archive Coverage + if: matrix.environment == 'coverage' + uses: actions/upload-artifact@v2 + with: + name: coverage + path: coverage.xml + +# Build python package + build: + needs: tox + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install pypa/build + run: >- + python -m + pip install + build + --user + - name: Build a binary wheel and a source tarball + run: >- + python -m + build + --sdist + --wheel + --outdir dist/ diff --git a/.gitignore b/.gitignore index 2c52df24..4a2bbd69 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ *.pyc *.pyo *.egg-info/ +.eggs/ .pyre/ __pycache__/ .tox/ @@ -10,6 +11,7 @@ docs/build/ dist/ docs/source/.ipynb_checkpoints/ build/ +libcst/_version.py .coverage .hypothesis/ .pyre_configuration diff --git a/libcst/__init__.py b/libcst/__init__.py index cc71ce2a..8d9210e5 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -190,8 +190,13 @@ from libcst._parser.types.config import ( PartialParserConfig, ) from libcst._removal_sentinel import RemovalSentinel, RemoveFromParent -from libcst._version import LIBCST_VERSION from libcst._visitors import CSTNodeT, CSTTransformer, CSTVisitor, CSTVisitorT + + +try: + from libcst._version import version as LIBCST_VERSION +except ImportError: + LIBCST_VERSION = "unknown" from libcst.helpers import ( # from libcst import ensure_type is deprecated, will be removed in 0.4.0 ensure_type, ) diff --git a/libcst/_version.py b/libcst/_version.py deleted file mode 100644 index 960ab01d..00000000 --- a/libcst/_version.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Facebook, Inc. and its affiliates. 
-# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - - -LIBCST_VERSION: str = "0.3.20" diff --git a/libcst/tool.py b/libcst/tool.py index de626ac3..fff190c2 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -801,7 +801,7 @@ def main(proc_name: str, cli_args: List[str]) -> int: "--version", help="Print current version of LibCST toolset.", action="version", - version=f"LibCST version {LIBCST_VERSION}", + version=f"LibCST version {LIBCST_VERSION}", # pyre-ignore[16] pyre bug? ) parser.add_argument( "action", diff --git a/requirements-dev.txt b/requirements-dev.txt index 2f86e9b3..62e26410 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -13,3 +13,4 @@ pyre-check==0.0.41 sphinx-rtd-theme>=0.4.3 prompt-toolkit>=2.0.9 tox>=3.18.1 +setuptools_scm>=6.0.1 diff --git a/setup.py b/setup.py index 4dd024dd..f554140d 100644 --- a/setup.py +++ b/setup.py @@ -4,38 +4,24 @@ # LICENSE file in the root directory of this source tree. -import importlib.util from os import path -from typing import TYPE_CHECKING import setuptools -if TYPE_CHECKING: - from importlib.machinery import ModuleSpec - from types import ModuleType - # Grab the readme so that our package stays in sync with github. this_directory: str = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.rst"), encoding="utf-8") as f: long_description = f.read() -# Grab the version constant so that libcst.tool stays in sync with this package. -spec: "ModuleSpec" = importlib.util.spec_from_file_location( - "version", path.join(this_directory, "libcst/_version.py") -) -version: "ModuleType" = importlib.util.module_from_spec(spec) -# pyre-ignore Pyre doesn't know about importlib entirely. -spec.loader.exec_module(version) -# pyre-ignore Pyre has no way of knowing that this constant exists. 
-LIBCST_VERSION = version.LIBCST_VERSION - setuptools.setup( + use_scm_version={ + "write_to": "libcst/_version.py", + }, name="libcst", description="A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7 and 3.8 programs.", long_description=long_description, long_description_content_type="text/x-rst", - version=LIBCST_VERSION, url="https://github.com/Instagram/LibCST", license="MIT", packages=setuptools.find_packages(), @@ -46,9 +32,14 @@ setuptools.setup( }, test_suite="libcst", python_requires=">=3.6", + setup_requires=["setuptools_scm"], install_requires=[dep.strip() for dep in open("requirements.txt").readlines()], extras_require={ - "dev": [dep.strip() for dep in open("requirements-dev.txt").readlines() if "=" in dep], + "dev": [ + dep.strip() + for dep in open("requirements-dev.txt").readlines() + if "=" in dep + ], }, classifiers=[ "License :: OSI Approved :: MIT License", diff --git a/tox.ini b/tox.ini index a9947c2a..5578d290 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,11 @@ [tox] -envlist = py36, py37, py38, py39, lint, docs +envlist = py36, py37, py38, lint, docs + +[gh-actions] +python = + 3.6: py36 + 3.7: py37 + 3.8: py38 [testenv] deps = @@ -8,10 +14,9 @@ deps = commands = python -m unittest {posargs} +[testenv:test] + [testenv:lint] -deps = - -rrequirements.txt - -rrequirements-dev.txt commands = flake8 {posargs} isort --check-only {posargs:.} @@ -19,16 +24,10 @@ commands = python3 -m fixit.cli.run_rules [testenv:docs] -deps = - -rrequirements.txt - -rrequirements-dev.txt commands = sphinx-build {posargs:docs/source/ docs/build/} [testenv:autofix] -deps = - -rrequirements.txt - -rrequirements-dev.txt commands = flake8 {posargs} isort -q {posargs:.} @@ -36,9 +35,6 @@ commands = python3 -m fixit.cli.apply_fix [testenv:coverage] -deps = - -rrequirements.txt - -rrequirements-dev.txt passenv = CI CIRCLECI @@ -47,11 +43,20 @@ commands = coverage run setup.py test codecov +[testenv:pyre] +usedevelop=True +setenv = PYTHONPATH = {toxinidir} +allowlist_externals= + cp +commands = + cp .github/workflows/.pyre_configuration . 
+ pyre --version + pyre check + python libcst/tests/test_pyre_integration.py + git diff --exit-code + [testenv:fuzz36] basepython = python3.6 -deps = - -rrequirements.txt - -rrequirements-dev.txt setenv = HYPOTHESIS = 1 commands = @@ -59,9 +64,6 @@ commands = [testenv:fuzz37] basepython = python3.7 -deps = - -rrequirements.txt - -rrequirements-dev.txt setenv = HYPOTHESIS = 1 commands = @@ -69,18 +71,13 @@ commands = [testenv:fuzz38] basepython = python3.8 -deps = - -rrequirements.txt - -rrequirements-dev.txt setenv = HYPOTHESIS = 1 commands = python3.8 -m unittest libcst/tests/test_fuzz.py + [testenv:codegen] -deps = - -rrequirements.txt - -rrequirements-dev.txt commands = python3 -m libcst.codegen.generate visitors python3 -m libcst.codegen.generate return_types From 03b3933af63c5c8ca8033abc2bca32578dfceec0 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 11 Aug 2021 13:50:18 +0100 Subject: [PATCH 122/632] remove circleci config and update readme (#509) --- .circleci/config.yml | 114 ------------------------------------------- README.rst | 8 +-- 2 files changed, 4 insertions(+), 118 deletions(-) delete mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 1ae8cce7..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,114 +0,0 @@ -# Python CircleCI 2.0 configuration file -# -# Check https://circleci.com/docs/2.0/language-python/ for more details -# -version: 2.1 -workflows: - version: 2 - test: - jobs: - - lint - - docs - - pyre - - test-38 - - test-37 - - test-36 - - test-coverage - -commands: - tox: - description: "setup tox env and run tox command giving env parameter" - parameters: - env: - type: string - default: test - steps: - - checkout - - restore_cache: - key: tox-v1-{{ checksum "tox.ini" }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "setup.py" }}-{{ checksum ".circleci/config.yml" }}-<< parameters.env >> - - run: - name: install tox - command: pip install --user tox - - run: - name: run tox - command: ~/.local/bin/tox -e << parameters.env >> - - save_cache: - key: tox-v1-{{ checksum "tox.ini" }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "setup.py" }}-{{ checksum ".circleci/config.yml" }}-<< parameters.env >> - paths: - - '.tox' - -jobs: - lint: - docker: - - image: circleci/python:3.7 - steps: - - tox: - env: "lint" - - docs: - docker: - - image: circleci/python:3.7 - steps: - - run: - command: sudo apt-get install graphviz - - tox: - env: "docs" - - store_artifacts: - path: docs/build - destination: doc - - pyre: - docker: - - image: circleci/python:3.7 - steps: - - checkout - - restore_cache: - key: pyre-v1-{{ checksum "tox.ini" }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "setup.py" }}-{{ checksum ".circleci/config.yml" }} - - run: - name: run pyre - command: | - test -d /tmp/libcst-env/ || python3 -m venv /tmp/libcst-env/ - source /tmp/libcst-env/bin/activate - pip install --upgrade pip - pip install -r requirements.txt -r requirements-dev.txt - pip uninstall -y libcst - pip install -e . - cp .circleci/.pyre_configuration . 
- pyre check - PYTHONPATH=`pwd` python libcst/tests/test_pyre_integration.py - git diff --exit-code # verify no generated changes - - save_cache: - key: pyre-v1-{{ checksum "tox.ini" }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "setup.py" }}-{{ checksum ".circleci/config.yml" }} - paths: - - '/tmp/libcst-env/' - - test-38: - docker: - - image: circleci/python:3.8 - steps: - - tox: - env: "py38" - - test-37: - docker: - - image: circleci/python:3.7 - steps: - - tox: - env: "py37" - - test-coverage: - docker: - - image: circleci/python:3.7 - steps: - - tox: - env: "py37" - - tox: - env: "coverage" - - test-36: - docker: - - image: circleci/python:3.6 - steps: - - tox: - env: "py36" - diff --git a/README.rst b/README.rst index a2e999f9..3e563a2f 100644 --- a/README.rst +++ b/README.rst @@ -4,15 +4,15 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python -|readthedocs-badge| |circleci-badge| |codecov-badge| |pypi-badge| |pypi-download| |notebook-badge| +|readthedocs-badge| |ci-badge| |codecov-badge| |pypi-badge| |pypi-download| |notebook-badge| .. |readthedocs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest&style=flat :target: https://libcst.readthedocs.io/en/latest/ :alt: Documentation -.. |circleci-badge| image:: https://circleci.com/gh/Instagram/LibCST/tree/master.svg?style=shield&circle-token=f89ff46c689cf53116308db295a492d687bf5732 - :target: https://circleci.com/gh/Instagram/LibCST/tree/master - :alt: CircleCI +.. |ci-badge| image:: https://github.com/Instagram/LibCST/actions/workflows/build.yml/badge.svg + :target: https://github.com/Instagram/LibCST/actions/workflows/build.yml?query=branch%3Amaster + :alt: Github Actions .. |codecov-badge| image:: https://codecov.io/gh/Instagram/LibCST/branch/master/graph/badge.svg :target: https://codecov.io/gh/Instagram/LibCST/branch/master From 87f0becd0155cd9edf1877a56447a2c683e4a6a2 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 11 Aug 2021 09:48:30 -0400 Subject: [PATCH 123/632] Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same (#508) --- libcst/codegen/gen_matcher_classes.py | 9 +- libcst/matchers/__init__.py | 1091 +++++++++++++------------ 2 files changed, 591 insertions(+), 509 deletions(-) diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index b0657890..7bb7120d 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -3,6 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import re from dataclasses import dataclass, fields from typing import Generator, List, Optional, Sequence, Set, Tuple, Type, Union @@ -12,6 +13,8 @@ from libcst.codegen.gather import all_libcst_nodes, typeclasses CST_DIR: Set[str] = set(dir(cst)) +CLASS_RE = r"" +OPTIONAL_RE = r"typing\.Union\[([^,]*?), NoneType]" class CleanseFullTypeNames(cst.CSTTransformer): @@ -396,8 +399,8 @@ def _get_clean_type_and_aliases( # First, get the type as a parseable expression. typestr = repr(typeobj) - if typestr.startswith(""): - typestr = typestr[8:-2] + typestr = re.sub(CLASS_RE, r"\1", typestr) + typestr = re.sub(OPTIONAL_RE, r"typing.Optional[\1]", typestr) # Now, parse the expression with LibCST. 
cleanser = CleanseFullTypeNames() @@ -457,7 +460,7 @@ generated_code.append("") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from dataclasses import dataclass") -generated_code.append("from typing import Callable, Sequence, Union") +generated_code.append("from typing import Callable, Optional, Sequence, Union") generated_code.append("from typing_extensions import Literal") generated_code.append("import libcst as cst") generated_code.append("") diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 73b3e7f2..8b7ba63f 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -6,7 +6,7 @@ # This file was generated by libcst.codegen.gen_matcher_classes from dataclasses import dataclass -from typing import Callable, Sequence, Union +from typing import Callable, Optional, Sequence, Union from typing_extensions import Literal @@ -231,12 +231,6 @@ BaseAssignTargetExpressionMatchType = Union[ AnnotationMatchType = Union[ "Annotation", MetadataMatchType, MatchIfTrue[Callable[[cst.Annotation], bool]] ] -BaseExpressionOrNoneMatchType = Union[ - "BaseExpression", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.BaseExpression, None]], bool]], -] AssignEqualMatchType = Union[ "AssignEqual", MetadataMatchType, MatchIfTrue[Callable[[cst.AssignEqual], bool]] ] @@ -260,10 +254,24 @@ class AnnAssign(BaseSmallStatement, BaseMatcherNode): AllOf[AnnotationMatchType], ] = DoNotCare() value: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() equal: Union[ AssignEqualMatchType, @@ -320,12 +328,6 @@ class Annotation(BaseMatcherNode): ] = DoNotCare() -NameOrNoneMatchType = Union[ - "Name", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Name, None]], bool]], -] CommaMatchType = Union[ "Comma", MetadataMatchType, MatchIfTrue[Callable[[cst.Comma], bool]] ] @@ -340,10 +342,24 @@ class Arg(BaseMatcherNode): AllOf[BaseExpressionMatchType], ] = DoNotCare() keyword: Union[ - NameOrNoneMatchType, + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Name]], bool]], DoNotCareSentinel, - OneOf[NameOrNoneMatchType], - AllOf[NameOrNoneMatchType], + OneOf[ + Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Name]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Name]], bool]], + ] + ], ] = DoNotCare() equal: Union[ AssignEqualMatchType, @@ -447,10 +463,24 @@ class Assert(BaseSmallStatement, BaseMatcherNode): AllOf[BaseExpressionMatchType], ] = DoNotCare() msg: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ 
+ Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() comma: Union[ CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] @@ -2385,18 +2415,6 @@ class Comment(BaseMatcherNode): CompIfMatchType = Union[ "CompIf", MetadataMatchType, MatchIfTrue[Callable[[cst.CompIf], bool]] ] -CompForOrNoneMatchType = Union[ - "CompFor", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.CompFor, None]], bool]], -] -AsynchronousOrNoneMatchType = Union[ - "Asynchronous", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Asynchronous, None]], bool]], -] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -2494,16 +2512,44 @@ class CompFor(BaseMatcherNode): ], ] = DoNotCare() inner_for_in: Union[ - CompForOrNoneMatchType, + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.CompFor]], bool]], DoNotCareSentinel, - OneOf[CompForOrNoneMatchType], - AllOf[CompForOrNoneMatchType], + OneOf[ + Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.CompFor]], bool]], + ] + ], + AllOf[ + Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.CompFor]], bool]], + ] + ], ] = DoNotCare() asynchronous: Union[ - AsynchronousOrNoneMatchType, + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], DoNotCareSentinel, - OneOf[AsynchronousOrNoneMatchType], - AllOf[AsynchronousOrNoneMatchType], + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], ] = DoNotCare() whitespace_before: Union[ BaseParenthesizableWhitespaceMatchType, @@ -4126,12 +4172,6 @@ class Else(BaseMatcherNode): boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[Callable[[bool], bool]]] -CommentOrNoneMatchType = Union[ - "Comment", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Comment, None]], bool]], -] NewlineMatchType = Union[ "Newline", MetadataMatchType, MatchIfTrue[Callable[[cst.Newline], bool]] ] @@ -4149,10 +4189,24 @@ class EmptyLine(BaseMatcherNode): AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() comment: Union[ - CommentOrNoneMatchType, + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], DoNotCareSentinel, - OneOf[CommentOrNoneMatchType], - AllOf[CommentOrNoneMatchType], + OneOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + ] + ], ] = DoNotCare() newline: Union[ NewlineMatchType, @@ -4190,14 +4244,6 @@ class Equal(BaseCompOp, BaseMatcherNode): ] = DoNotCare() -AsNameOrNoneMatchType = Union[ - "AsName", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.AsName, None]], bool]], -] - - @dataclass(frozen=True, eq=False, unsafe_hash=False) class ExceptHandler(BaseMatcherNode): body: Union[ @@ -4207,16 +4253,44 @@ class ExceptHandler(BaseMatcherNode): AllOf[BaseSuiteMatchType], ] = DoNotCare() type: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - 
OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() name: Union[ - AsNameOrNoneMatchType, + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], DoNotCareSentinel, - OneOf[AsNameOrNoneMatchType], - AllOf[AsNameOrNoneMatchType], + OneOf[ + Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + ] + ], + AllOf[ + Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + ] + ], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -4659,14 +4733,6 @@ class FloorDivideAssign(BaseAugOp, BaseMatcherNode): ] = DoNotCare() -ElseOrNoneMatchType = Union[ - "Else", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Else, None]], bool]], -] - - @dataclass(frozen=True, eq=False, unsafe_hash=False) class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): target: Union[ @@ -4688,16 +4754,44 @@ class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): AllOf[BaseSuiteMatchType], ] = DoNotCare() orelse: Union[ - ElseOrNoneMatchType, + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], DoNotCareSentinel, - OneOf[ElseOrNoneMatchType], - AllOf[ElseOrNoneMatchType], + OneOf[ + Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + ] + ], ] = DoNotCare() asynchronous: Union[ - AsynchronousOrNoneMatchType, + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], DoNotCareSentinel, - OneOf[AsynchronousOrNoneMatchType], - AllOf[AsynchronousOrNoneMatchType], + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -5091,17 +5185,6 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ] = DoNotCare() -strOrNoneMatchType = Union[ - str, None, MetadataMatchType, MatchIfTrue[Callable[[Union[str, None]], bool]] -] -AssignEqualOrNoneMatchType = Union[ - "AssignEqual", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.AssignEqual, None]], bool]], -] - - @dataclass(frozen=True, eq=False, unsafe_hash=False) class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): expression: Union[ @@ -5111,335 +5194,47 @@ class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): AllOf[BaseExpressionMatchType], ] = DoNotCare() conversion: Union[ - strOrNoneMatchType, + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], DoNotCareSentinel, - OneOf[strOrNoneMatchType], - AllOf[strOrNoneMatchType], + OneOf[ + Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], + ] + ], + AllOf[ + Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], + ] + ], ] = DoNotCare() 
format_spec: Union[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseFormattedStringContent]], bool]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[[Sequence[cst.BaseFormattedStringContent]], bool] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[[Sequence[cst.BaseFormattedStringContent]], bool] - ], - ] - ], - ], - None, + Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, MatchIfTrue[ - Callable[ - [ - Union[ - Sequence[cst.BaseFormattedStringContent], - None, - OneOf[Union[Sequence[cst.BaseFormattedStringContent], None]], - AllOf[Union[Sequence[cst.BaseFormattedStringContent], None]], - ] - ], - bool, - ] + Callable[[Optional[Sequence[cst.BaseFormattedStringContent]]], bool] ], DoNotCareSentinel, OneOf[ Union[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[[Sequence[cst.BaseFormattedStringContent]], bool] - ], - OneOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[ - [Sequence[cst.BaseFormattedStringContent]], bool - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - 
OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[ - [Sequence[cst.BaseFormattedStringContent]], bool - ] - ], - ] - ], - ], - None, + Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, MatchIfTrue[ - Callable[ - [ - Union[ - Sequence[cst.BaseFormattedStringContent], - None, - OneOf[ - Union[ - Sequence[cst.BaseFormattedStringContent], None - ] - ], - AllOf[ - Union[ - Sequence[cst.BaseFormattedStringContent], None - ] - ], - ] - ], - bool, - ] + Callable[[Optional[Sequence[cst.BaseFormattedStringContent]]], bool] ], ] ], AllOf[ Union[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[[Sequence[cst.BaseFormattedStringContent]], bool] - ], - OneOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[ - [Sequence[cst.BaseFormattedStringContent]], bool - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Callable[ - [Sequence[cst.BaseFormattedStringContent]], bool - ] - ], - ] - ], - ], - None, + Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, MatchIfTrue[ - Callable[ - [ - Union[ - Sequence[cst.BaseFormattedStringContent], - None, - OneOf[ - Union[ - Sequence[cst.BaseFormattedStringContent], None - ] - ], - AllOf[ - Union[ - Sequence[cst.BaseFormattedStringContent], None - ] - ], - ] - ], - bool, - ] + Callable[[Optional[Sequence[cst.BaseFormattedStringContent]]], bool] ], ] ], @@ -5457,10 +5252,24 @@ class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): AllOf[BaseParenthesizableWhitespaceMatchType], ] = DoNotCare() equal: Union[ - AssignEqualOrNoneMatchType, + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AssignEqual]], bool]], DoNotCareSentinel, - OneOf[AssignEqualOrNoneMatchType], - 
AllOf[AssignEqualOrNoneMatchType], + OneOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AssignEqual]], bool]], + ] + ], + AllOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AssignEqual]], bool]], + ] + ], ] = DoNotCare() metadata: Union[ MetadataMatchType, @@ -5514,12 +5323,6 @@ class From(BaseMatcherNode): ParametersMatchType = Union[ "Parameters", MetadataMatchType, MatchIfTrue[Callable[[cst.Parameters], bool]] ] -AnnotationOrNoneMatchType = Union[ - "Annotation", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Annotation, None]], bool]], -] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -5620,16 +5423,44 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] = DoNotCare() returns: Union[ - AnnotationOrNoneMatchType, + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], DoNotCareSentinel, - OneOf[AnnotationOrNoneMatchType], - AllOf[AnnotationOrNoneMatchType], + OneOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + ] + ], ] = DoNotCare() asynchronous: Union[ - AsynchronousOrNoneMatchType, + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], DoNotCareSentinel, - OneOf[AsynchronousOrNoneMatchType], - AllOf[AsynchronousOrNoneMatchType], + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -6794,10 +6625,24 @@ class ImportAlias(BaseMatcherNode): AllOf[AttributeOrNameMatchType], ] = DoNotCare() asname: Union[ - AsNameOrNoneMatchType, + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], DoNotCareSentinel, - OneOf[AsNameOrNoneMatchType], - AllOf[AsNameOrNoneMatchType], + OneOf[ + Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + ] + ], + AllOf[ + Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + ] + ], ] = DoNotCare() comma: Union[ CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] @@ -6817,18 +6662,6 @@ AttributeOrNameOrNoneMatchType = Union[ MetadataMatchType, MatchIfTrue[Callable[[Union[cst.Attribute, cst.Name, None]], bool]], ] -LeftParenOrNoneMatchType = Union[ - "LeftParen", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.LeftParen, None]], bool]], -] -RightParenOrNoneMatchType = Union[ - "RightParen", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.RightParen, None]], bool]], -] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -7200,16 +7033,44 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] = DoNotCare() lpar: Union[ - LeftParenOrNoneMatchType, + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.LeftParen]], bool]], DoNotCareSentinel, - OneOf[LeftParenOrNoneMatchType], - AllOf[LeftParenOrNoneMatchType], + OneOf[ + Union[ + Optional["LeftParen"], + MetadataMatchType, + 
MatchIfTrue[Callable[[Optional[cst.LeftParen]], bool]], + ] + ], + AllOf[ + Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.LeftParen]], bool]], + ] + ], ] = DoNotCare() rpar: Union[ - RightParenOrNoneMatchType, + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.RightParen]], bool]], DoNotCareSentinel, - OneOf[RightParenOrNoneMatchType], - AllOf[RightParenOrNoneMatchType], + OneOf[ + Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.RightParen]], bool]], + ] + ], + AllOf[ + Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.RightParen]], bool]], + ] + ], ] = DoNotCare() semicolon: Union[ SemicolonMatchType, @@ -7369,10 +7230,24 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): AllOf[TrailingWhitespaceMatchType], ] = DoNotCare() indent: Union[ - strOrNoneMatchType, + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], DoNotCareSentinel, - OneOf[strOrNoneMatchType], - AllOf[strOrNoneMatchType], + OneOf[ + Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], + ] + ], + AllOf[ + Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], + ] + ], ] = DoNotCare() footer: Union[ Sequence[ @@ -9404,10 +9279,24 @@ class NamedExpr(BaseExpression, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) class Newline(BaseMatcherNode): value: Union[ - strOrNoneMatchType, + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], DoNotCareSentinel, - OneOf[strOrNoneMatchType], - AllOf[strOrNoneMatchType], + OneOf[ + Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], + ] + ], + AllOf[ + Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[str]], bool]], + ] + ], ] = DoNotCare() metadata: Union[ MetadataMatchType, @@ -9616,10 +9505,24 @@ class Param(BaseMatcherNode): NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] ] = DoNotCare() annotation: Union[ - AnnotationOrNoneMatchType, + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], DoNotCareSentinel, - OneOf[AnnotationOrNoneMatchType], - AllOf[AnnotationOrNoneMatchType], + OneOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + ] + ], ] = DoNotCare() equal: Union[ AssignEqualMatchType, @@ -9628,10 +9531,24 @@ class Param(BaseMatcherNode): AllOf[AssignEqualMatchType], ] = DoNotCare() default: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() comma: Union[ CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] @@ -9694,12 +9611,6 @@ ParamOrParamStarMatchType = Union[ MetadataMatchType, 
MatchIfTrue[Callable[[Union[cst.Param, cst.ParamStar]], bool]], ] -ParamOrNoneMatchType = Union[ - "Param", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Param, None]], bool]], -] ParamSlashMatchType = Union[ "ParamSlash", MetadataMatchType, MatchIfTrue[Callable[[cst.ParamSlash], bool]] ] @@ -9874,10 +9785,24 @@ class Parameters(BaseMatcherNode): ], ] = DoNotCare() star_kwarg: Union[ - ParamOrNoneMatchType, + Optional["Param"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Param]], bool]], DoNotCareSentinel, - OneOf[ParamOrNoneMatchType], - AllOf[ParamOrNoneMatchType], + OneOf[ + Union[ + Optional["Param"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Param]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Param"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Param]], bool]], + ] + ], ] = DoNotCare() posonly_params: Union[ Sequence[ @@ -10154,27 +10079,47 @@ class PowerAssign(BaseAugOp, BaseMatcherNode): ] = DoNotCare() -FromOrNoneMatchType = Union[ - "From", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.From, None]], bool]], -] - - @dataclass(frozen=True, eq=False, unsafe_hash=False) class Raise(BaseSmallStatement, BaseMatcherNode): exc: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() cause: Union[ - FromOrNoneMatchType, + Optional["From"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.From]], bool]], DoNotCareSentinel, - OneOf[FromOrNoneMatchType], - AllOf[FromOrNoneMatchType], + OneOf[ + Union[ + Optional["From"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.From]], bool]], + ] + ], + AllOf[ + Union[ + Optional["From"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.From]], bool]], + ] + ], ] = DoNotCare() whitespace_after_raise: Union[ SimpleWhitespaceMatchType, @@ -10199,10 +10144,24 @@ class Raise(BaseSmallStatement, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) class Return(BaseSmallStatement, BaseMatcherNode): value: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() whitespace_after_return: Union[ SimpleWhitespaceMatchType, @@ -11268,22 +11227,64 @@ class SimpleWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) class Slice(BaseSlice, BaseMatcherNode): lower: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], 
+ OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() upper: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() step: Union[ - BaseExpressionOrNoneMatchType, + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], DoNotCareSentinel, - OneOf[BaseExpressionOrNoneMatchType], - AllOf[BaseExpressionOrNoneMatchType], + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + ] + ], ] = DoNotCare() first_colon: Union[ ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] @@ -11869,10 +11870,24 @@ class TrailingWhitespace(BaseMatcherNode): AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() comment: Union[ - CommentOrNoneMatchType, + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], DoNotCareSentinel, - OneOf[CommentOrNoneMatchType], - AllOf[CommentOrNoneMatchType], + OneOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + ] + ], ] = DoNotCare() newline: Union[ NewlineMatchType, @@ -11891,12 +11906,6 @@ class TrailingWhitespace(BaseMatcherNode): ExceptHandlerMatchType = Union[ "ExceptHandler", MetadataMatchType, MatchIfTrue[Callable[[cst.ExceptHandler], bool]] ] -FinallyOrNoneMatchType = Union[ - "Finally", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Finally, None]], bool]], -] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -11988,16 +11997,44 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] = DoNotCare() orelse: Union[ - ElseOrNoneMatchType, + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], DoNotCareSentinel, - OneOf[ElseOrNoneMatchType], - AllOf[ElseOrNoneMatchType], + OneOf[ + Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + ] + ], ] = DoNotCare() finalbody: Union[ - FinallyOrNoneMatchType, + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Finally]], bool]], DoNotCareSentinel, - OneOf[FinallyOrNoneMatchType], - AllOf[FinallyOrNoneMatchType], + OneOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Finally]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + 
MatchIfTrue[Callable[[Optional[cst.Finally]], bool]], + ] + ], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -12547,10 +12584,24 @@ class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): AllOf[BaseSuiteMatchType], ] = DoNotCare() orelse: Union[ - ElseOrNoneMatchType, + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], DoNotCareSentinel, - OneOf[ElseOrNoneMatchType], - AllOf[ElseOrNoneMatchType], + OneOf[ + Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + ] + ], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -12746,10 +12797,24 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): AllOf[BaseSuiteMatchType], ] = DoNotCare() asynchronous: Union[ - AsynchronousOrNoneMatchType, + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], DoNotCareSentinel, - OneOf[AsynchronousOrNoneMatchType], - AllOf[AsynchronousOrNoneMatchType], + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + ] + ], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -12860,10 +12925,24 @@ class WithItem(BaseMatcherNode): AllOf[BaseExpressionMatchType], ] = DoNotCare() asname: Union[ - AsNameOrNoneMatchType, + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], DoNotCareSentinel, - OneOf[AsNameOrNoneMatchType], - AllOf[AsNameOrNoneMatchType], + OneOf[ + Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + ] + ], + AllOf[ + Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + ] + ], ] = DoNotCare() comma: Union[ CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] From 398d14e91bf5ffdc6751e010dedde146aedc51c6 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 11 Aug 2021 11:41:18 -0400 Subject: [PATCH 124/632] Switch code coverage from tox => gh actions (#510) --- .github/workflows/build.yml | 38 ++++++++++++++++++++++++++++++++----- requirements-dev.txt | 1 - tox.ini | 9 --------- 3 files changed, 33 insertions(+), 15 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f0f703f7..98eca297 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -21,9 +21,6 @@ jobs: - os: ubuntu-latest python-version: 3.8 environment: docs - - os: ubuntu-latest - python-version: 3.8 - environment: coverage - os: ubuntu-latest python-version: 3.8 environment: pyre @@ -46,7 +43,7 @@ jobs: - name: install tox run: | python -m pip install --upgrade pip - pip install tox tox-gh-actions -r requirements.txt -r requirements-dev.txt + pip install tox-gh-actions -r requirements.txt -r requirements-dev.txt - name: run tox run: tox --workdir ~/cache/tox -e ${{ matrix.environment }} - name: Archive Docs @@ -55,8 +52,39 @@ jobs: with: name: sphinx-docs path: docs/build + +# Upload test coverage + coverage: + needs: tox + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: pip cache + uses: 
actions/cache@v2 + id: cache + with: + path: '~/cache' + key: pip-ubuntu-latest-3.8-test-${{ hashFiles('tox.ini', 'requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: install pip + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt -r requirements-dev.txt + - name: generate coverage + run: | + coverage run setup.py test + coverage xml -i + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v2 + with: + files: coverage.xml + fail_ci_if_error: true + verbose: true - name: Archive Coverage - if: matrix.environment == 'coverage' uses: actions/upload-artifact@v2 with: name: coverage diff --git a/requirements-dev.txt b/requirements-dev.txt index 62e26410..0dd8f54d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,4 @@ black==20.8b1 -codecov>=2.1.4 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8 diff --git a/tox.ini b/tox.ini index 5578d290..d2771001 100644 --- a/tox.ini +++ b/tox.ini @@ -34,15 +34,6 @@ commands = black {posargs:libcst/} python3 -m fixit.cli.apply_fix -[testenv:coverage] -passenv = - CI - CIRCLECI - CIRCLE_* -commands = - coverage run setup.py test - codecov - [testenv:pyre] usedevelop=True setenv = PYTHONPATH = {toxinidir} From 1cceed6df81415f00d920419244298cc4b442902 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 11 Aug 2021 12:16:58 -0400 Subject: [PATCH 125/632] Adding python3.9 to the CI (#506) --- .github/workflows/.pyre_configuration | 2 +- .github/workflows/build.yml | 14 +++++++------- tox.ini | 3 ++- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.github/workflows/.pyre_configuration b/.github/workflows/.pyre_configuration index 46a87b63..83478f3b 100644 --- a/.github/workflows/.pyre_configuration +++ b/.github/workflows/.pyre_configuration @@ -3,7 +3,7 @@ "." 
], "search_path": [ - "stubs", "~/cache/tox/pyre/lib/python3.8/site-packages/" + "stubs", "~/cache/tox/pyre/lib/python3.9/site-packages/" ], "strict": true } diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 98eca297..2eabbf20 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,17 +12,17 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest] - python-version: [3.6, 3.7, 3.8] + python-version: [3.6, 3.7, 3.8, 3.9] environment: [test] include: - os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 environment: lint - os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 environment: docs - os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 environment: pyre steps: - uses: actions/checkout@v1 @@ -63,13 +63,13 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 - name: pip cache uses: actions/cache@v2 id: cache with: path: '~/cache' - key: pip-ubuntu-latest-3.8-test-${{ hashFiles('tox.ini', 'requirements.txt', 'requirements-dev.txt', 'setup.py') }} + key: pip-ubuntu-latest-3.9-test-${{ hashFiles('tox.ini', 'requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: install pip run: | python -m pip install --upgrade pip @@ -100,7 +100,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 - name: Install pypa/build run: >- python -m diff --git a/tox.ini b/tox.ini index d2771001..8be5e9b2 100644 --- a/tox.ini +++ b/tox.ini @@ -1,11 +1,12 @@ [tox] -envlist = py36, py37, py38, lint, docs +envlist = py36, py37, py38, py39, lint, docs [gh-actions] python = 3.6: py36 3.7: py37 3.8: py38 + 3.9: py39 [testenv] deps = From 695f844c6d5ca26c4913dd11f35f7250fbef86aa Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 12 Aug 2021 15:18:19 -0400 Subject: [PATCH 126/632] Split up some larger CI/tox steps into separate jobs (#511) --- ...iguration => .pyre_configuration.template} | 2 +- .github/workflows/build.yml | 203 ++++++++++++------ 2 files changed, 140 insertions(+), 65 deletions(-) rename .github/workflows/{.pyre_configuration => .pyre_configuration.template} (60%) diff --git a/.github/workflows/.pyre_configuration b/.github/workflows/.pyre_configuration.template similarity index 60% rename from .github/workflows/.pyre_configuration rename to .github/workflows/.pyre_configuration.template index 83478f3b..dc25a33f 100644 --- a/.github/workflows/.pyre_configuration +++ b/.github/workflows/.pyre_configuration.template @@ -3,7 +3,7 @@ "." 
], "search_path": [ - "stubs", "~/cache/tox/pyre/lib/python3.9/site-packages/" + "stubs", "{{ .python_site_packages }}" ], "strict": true } diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2eabbf20..2ba5c8ab 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,84 +2,134 @@ name: Python CI on: [push, pull_request] -env: - PIP_CACHE_DIR: ~/cache/pip - jobs: - tox: +# Install and cache dependencies + setup: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest, macos-latest] python-version: [3.6, 3.7, 3.8, 3.9] - environment: [test] - include: - - os: ubuntu-latest - python-version: 3.9 - environment: lint - - os: ubuntu-latest - python-version: 3.9 - environment: docs - - os: ubuntu-latest - python-version: 3.9 - environment: pyre steps: - uses: actions/checkout@v1 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + - uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - - name: pip cache - uses: actions/cache@v2 - id: cache + - uses: actions/cache@v2 + id: cache with: - path: '~/cache' - key: pip-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.environment }}-${{ hashFiles('tox.ini', 'requirements.txt', 'requirements-dev.txt', 'setup.py') }} - restore-keys: | - pip-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.environment }}- - pip-${{ matrix.os }}-${{ matrix.python-version }}- - pip-${{ matrix.os }}- - - name: install tox + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' run: | - python -m pip install --upgrade pip - pip install tox-gh-actions -r requirements.txt -r requirements-dev.txt - - name: run tox - run: tox --workdir ~/cache/tox -e ${{ matrix.environment }} - - name: Archive Docs - if: matrix.environment == 'docs' - uses: actions/upload-artifact@v2 + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + +# Run unittests + test: + needs: setup + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + python-version: [3.6, 3.7, 3.8, 3.9] + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 with: - name: sphinx-docs - path: docs/build + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: exit 1 + - name: Run Tests + run: python -m unittest + +# Run linters + lint: + needs: setup + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: exit 1 + - run: flake8 + - run: isort --check-only . 
+ - run: black --check libcst/ + - run: python3 -m fixit.cli.run_rules + +# Run pyre typechecker + typecheck: + needs: setup + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: exit 1 + - name: Get Python site-packages + id: python-info + run: | + echo "::set-output name=SITE_PACKAGES::$(python -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')" + - name: Configure Pyre + id: pyre_template + uses: chuhlomin/render-template@v1.2 + with: + template: .github/workflows/.pyre_configuration.template + vars: | + python_site_packages: ${{ steps.python-info.outputs.SITE_PACKAGES }} + - name: Write Pyre Config + run: echo '${{ steps.pyre_template.outputs.result }}' > .pyre_configuration + - run: pyre --version + - run: pyre check + - run: python libcst/tests/test_pyre_integration.py + - run: git diff --exit-code # Upload test coverage coverage: - needs: tox + needs: setup runs-on: ubuntu-latest - steps: - uses: actions/checkout@v1 - - name: Set up Python - uses: actions/setup-python@v2 + - uses: actions/setup-python@v2 with: python-version: 3.9 - - name: pip cache - uses: actions/cache@v2 - id: cache + - uses: actions/cache@v2 + id: cache with: - path: '~/cache' - key: pip-ubuntu-latest-3.9-test-${{ hashFiles('tox.ini', 'requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: install pip - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt -r requirements-dev.txt - - name: generate coverage + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: exit 1 + - name: Generate Coverage run: | coverage run setup.py test coverage xml -i - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v2 with: files: coverage.xml fail_ci_if_error: true @@ -90,23 +140,48 @@ jobs: name: coverage path: coverage.xml -# Build python package - build: - needs: tox +# Build the docs + docs: + needs: setup runs-on: ubuntu-latest - steps: - uses: actions/checkout@v1 - - name: Set up Python - uses: actions/setup-python@v2 + - uses: actions/setup-python@v2 with: python-version: 3.9 - - name: Install pypa/build - run: >- - python -m - pip install - build - --user + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: exit 1 + - uses: ts-graphviz/setup-graphviz@v1 + - run: sphinx-build docs/source/ docs/build/ + - name: Archive Docs + uses: actions/upload-artifact@v2 + with: + name: sphinx-docs + path: docs/build + +# Build python package + build: + needs: setup + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: 
steps.cache.outputs.cache-hit != 'true' + run: exit 1 - name: Build a binary wheel and a source tarball run: >- python -m From 5928f6ad81caf98b73631df6567a1af19fa02ddb Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 12 Aug 2021 15:41:23 -0400 Subject: [PATCH 127/632] Windows CI support (#505) * Support windows testing in github actions * Fix path handling in windows --- .github/workflows/build.yml | 4 ++-- libcst/codemod/_cli.py | 29 ++++++++++++----------------- libcst/codemod/tests/test_cli.py | 21 +++++++++++++++++++++ 3 files changed, 35 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2ba5c8ab..45920614 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest] + os: [ubuntu-latest, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v1 @@ -33,7 +33,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest] + os: [ubuntu-latest, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v1 diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 4ecbfb25..e4b7ec45 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -16,7 +16,7 @@ import time import traceback from dataclasses import dataclass, replace from multiprocessing import Pool, cpu_count -from pathlib import Path +from pathlib import Path, PurePath from typing import Any, AnyStr, Dict, List, Optional, Sequence, Union, cast from libcst import PartialParserConfig, parse_module @@ -192,26 +192,21 @@ def _calculate_module(repo_root: Optional[str], filename: str) -> Optional[str]: # We don't have a repo root, so this is impossible to calculate. return None - # Make sure the absolute path for the root ends in a separator. - if repo_root[-1] != os.path.sep: - repo_root = repo_root + os.path.sep - - if not filename.startswith(repo_root): + try: + relative_filename = PurePath(filename).relative_to(repo_root) + except ValueError: # This file seems to be out of the repo root. return None - # Get the relative path, get rid of any special cases and extensions. - relative_filename = filename[len(repo_root) :] - for ending in [ - f"{os.path.sep}__init__.py", - f"{os.path.sep}__main__.py", - ".py", - ]: - if relative_filename.endswith(ending): - relative_filename = relative_filename[: -len(ending)] + # get rid of extension + relative_filename = relative_filename.with_suffix("") - # Now, convert all line separators to dots to represent the python module. - return relative_filename.replace(os.path.sep, ".") + # get rid of any special cases + if relative_filename.stem in ["__init__", "__main__"]: + relative_filename = relative_filename.parent + + # Now, convert to dots to represent the python module. 
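+    # For example, repo_root="/home/username/root" and
+    # filename="/home/username/root/some/dir/__main__.py" leave
+    # relative_filename as PurePath("some/dir"), whose parts join to "some.dir".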
+ return ".".join(relative_filename.parts) @dataclass(frozen=True) diff --git a/libcst/codemod/tests/test_cli.py b/libcst/codemod/tests/test_cli.py index a4d1404f..99b4e09a 100644 --- a/libcst/codemod/tests/test_cli.py +++ b/libcst/codemod/tests/test_cli.py @@ -37,6 +37,27 @@ class TestPackageCalculation(UnitTest): "/home/username/root/some/dir/__main__.py", "some.dir", ), + # some windows tests + ( + "c:/Program Files/", + "d:/Program Files/some/dir/file.py", + None, + ), + ( + "c:/Program Files/other/", + "c:/Program Files/some/dir/file.py", + None, + ), + ( + "c:/Program Files/", + "c:/Program Files/some/dir/file.py", + "some.dir.file", + ), + ( + "c:/Program Files/", + "c:/Program Files/some/dir/__main__.py", + "some.dir", + ), ), ) def test_calculate_module( From 5e1e3fe9705d5fe086c3547874508011bda2d3ec Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 25 Aug 2021 20:39:29 -0400 Subject: [PATCH 128/632] The ufmt tool combines usort and black with a consistent wrapper, (#515) which ensures we won't have inconsistent black-vs-isort errors going forward. We can always format by running `ufmt format .` at the root, and check with `ufmt check .` in our CI actions. --- .github/workflows/build.yml | 3 +-- CONTRIBUTING.md | 2 +- README.rst | 4 ++-- docs/source/tutorial.ipynb | 2 +- libcst/__init__.py | 2 -- libcst/_add_slots.py | 1 - libcst/_batched_visitor.py | 1 - libcst/_exceptions.py | 1 - libcst/_flatten_sentinel.py | 1 - libcst/_metadata_dependent.py | 1 - libcst/_nodes/base.py | 1 - libcst/_nodes/internal.py | 1 - libcst/_nodes/module.py | 1 - libcst/_nodes/statement.py | 1 - libcst/_nodes/tests/test_cst_node.py | 1 - libcst/_nodes/whitespace.py | 1 - libcst/_parser/base_parser.py | 1 - libcst/_parser/conversions/expression.py | 1 - libcst/_parser/conversions/statement.py | 1 - libcst/_parser/custom_itertools.py | 1 - libcst/_parser/detect_config.py | 1 - libcst/_parser/entrypoints.py | 1 - libcst/_parser/grammar.py | 1 - libcst/_parser/parso/pgen2/generator.py | 1 - libcst/_parser/parso/python/tokenize.py | 1 - libcst/_parser/parso/tests/test_tokenize.py | 1 - libcst/_parser/parso/utils.py | 1 - libcst/_parser/production_decorator.py | 1 - .../_parser/tests/test_whitespace_parser.py | 1 - libcst/_parser/tests/test_wrapped_tokenize.py | 1 - libcst/_parser/types/config.py | 1 - libcst/_parser/types/conversions.py | 1 - libcst/_parser/types/partials.py | 1 - libcst/_parser/whitespace_parser.py | 1 - libcst/_parser/wrapped_tokenize.py | 1 - libcst/_position.py | 1 - libcst/_type_enforce.py | 1 - libcst/_typed_visitor.py | 8 ++++---- libcst/_typed_visitor_base.py | 1 - libcst/_types.py | 1 - libcst/_visitors.py | 1 - libcst/codegen/gen_matcher_classes.py | 1 - libcst/codegen/gen_type_mapping.py | 1 - libcst/codegen/gen_visitor_functions.py | 1 - libcst/codegen/generate.py | 3 +-- libcst/codemod/__init__.py | 1 - libcst/codemod/_cli.py | 1 - libcst/codemod/_command.py | 1 - libcst/codemod/_dummy_pool.py | 1 - libcst/codemod/_runner.py | 1 - .../convert_percent_format_to_fstring.py | 1 - .../codemod/commands/remove_unused_imports.py | 1 - libcst/codemod/visitors/__init__.py | 1 - .../_gather_string_annotation_names.py | 1 - .../visitors/_gather_unused_imports.py | 1 - libcst/helpers/__init__.py | 1 - libcst/helpers/_template.py | 1 - libcst/matchers/__init__.py | 20 +++++++++---------- libcst/matchers/_decorators.py | 1 - libcst/matchers/_return_types.py | 6 +++--- libcst/matchers/_visitors.py | 1 - libcst/metadata/__init__.py | 1 - libcst/metadata/base_provider.py | 1 - 
libcst/metadata/full_repo_manager.py | 1 - libcst/metadata/position_provider.py | 1 - libcst/metadata/scope_provider.py | 1 - .../metadata/tests/test_full_repo_manager.py | 1 - libcst/metadata/wrapper.py | 1 - libcst/testing/utils.py | 1 - libcst/tests/test_fuzz.py | 1 - libcst/tests/test_pyre_integration.py | 1 - libcst/tests/test_type_enforce.py | 1 - libcst/tool.py | 1 - pyproject.toml | 8 -------- requirements-dev.txt | 11 +++++----- setup.py | 1 - stubs/tokenize.pyi | 1 - tox.ini | 6 ++---- 78 files changed, 31 insertions(+), 110 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 45920614..71b9f405 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -69,8 +69,7 @@ jobs: if: steps.cache.outputs.cache-hit != 'true' run: exit 1 - run: flake8 - - run: isort --check-only . - - run: black --check libcst/ + - run: ufmt check . - run: python3 -m fixit.cli.run_rules # Run pyre typechecker diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 41a47707..f8eab9f0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,7 +31,7 @@ disclosure of security bugs. In those cases, please go through the process outlined on that page and do not file a public issue. ## Coding Style -We use flake8, isort and black to enforce coding style. +We use flake8 and ufmt to enforce coding style. ## License By contributing to LibCST, you agree that your contributions will be licensed diff --git a/README.rst b/README.rst index 3e563a2f..b603be9c 100644 --- a/README.rst +++ b/README.rst @@ -150,8 +150,8 @@ Start by setting up and activating a virtualenv: # If you're done with the virtualenv, you can leave it by running: deactivate -We use `isort `_ and `black `_ -to format code. To format changes to be conformant, run the following in the root: +We use `ufmt `_ to format code. To format +changes to be conformant, run the following in the root: .. code-block:: shell diff --git a/docs/source/tutorial.ipynb b/docs/source/tutorial.ipynb index 1fe57070..29e59808 100644 --- a/docs/source/tutorial.ipynb +++ b/docs/source/tutorial.ipynb @@ -186,7 +186,7 @@ "source": [ "Generate Source Code\n", "====================\n", - "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `Black `_ and `isort `_ to reformate the code to keep a consistent coding style." + "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt `_ to reformate the code to keep a consistent coding style." 
] }, { diff --git a/libcst/__init__.py b/libcst/__init__.py index 8d9210e5..01c1850c 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -192,7 +192,6 @@ from libcst._parser.types.config import ( from libcst._removal_sentinel import RemovalSentinel, RemoveFromParent from libcst._visitors import CSTNodeT, CSTTransformer, CSTVisitor, CSTVisitorT - try: from libcst._version import version as LIBCST_VERSION except ImportError: @@ -207,7 +206,6 @@ from libcst.metadata.base_provider import ( ) from libcst.metadata.wrapper import MetadataWrapper - __all__ = [ "KNOWN_PYTHON_VERSION_STRINGS", "LIBCST_VERSION", diff --git a/libcst/_add_slots.py b/libcst/_add_slots.py index 706f5d10..a62b2f2d 100644 --- a/libcst/_add_slots.py +++ b/libcst/_add_slots.py @@ -5,7 +5,6 @@ import dataclasses from typing import Any, Mapping, Type, TypeVar - _T = TypeVar("_T") diff --git a/libcst/_batched_visitor.py b/libcst/_batched_visitor.py index 9264c4c9..68340108 100644 --- a/libcst/_batched_visitor.py +++ b/libcst/_batched_visitor.py @@ -19,7 +19,6 @@ from libcst._metadata_dependent import MetadataDependent from libcst._typed_visitor import CSTTypedVisitorFunctions from libcst._visitors import CSTNodeT, CSTVisitor - if TYPE_CHECKING: from libcst._nodes.base import CSTNode # noqa: F401 diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index fe41f0ee..1e84aab0 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -13,7 +13,6 @@ from libcst._parser.parso.python.token import PythonTokenTypes, TokenType from libcst._parser.types.token import Token from libcst._tabs import expand_tabs - _EOF_STR: str = "end of file (EOF)" _INDENT_STR: str = "an indent" _DEDENT_STR: str = "a dedent" diff --git a/libcst/_flatten_sentinel.py b/libcst/_flatten_sentinel.py index 18148077..44865050 100644 --- a/libcst/_flatten_sentinel.py +++ b/libcst/_flatten_sentinel.py @@ -5,7 +5,6 @@ import sys - # PEP 585 if sys.version_info < (3, 9): from typing import Iterable, Sequence diff --git a/libcst/_metadata_dependent.py b/libcst/_metadata_dependent.py index c1627713..60423017 100644 --- a/libcst/_metadata_dependent.py +++ b/libcst/_metadata_dependent.py @@ -17,7 +17,6 @@ from typing import ( cast, ) - if TYPE_CHECKING: # Circular dependency for typing reasons only from libcst._nodes.base import CSTNode # noqa: F401 diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 47bf26ea..a1f659d9 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -15,7 +15,6 @@ from libcst._type_enforce import is_value_of_type from libcst._types import CSTNodeT from libcst._visitors import CSTTransformer, CSTVisitor, CSTVisitorT - _CSTNodeSelfT = TypeVar("_CSTNodeSelfT", bound="CSTNode") _EMPTY_SEQUENCE: Sequence["CSTNode"] = () diff --git a/libcst/_nodes/internal.py b/libcst/_nodes/internal.py index 5bbefc01..b8294fb6 100644 --- a/libcst/_nodes/internal.py +++ b/libcst/_nodes/internal.py @@ -14,7 +14,6 @@ from libcst._maybe_sentinel import MaybeSentinel from libcst._removal_sentinel import RemovalSentinel from libcst._types import CSTNodeT - if TYPE_CHECKING: # These are circular dependencies only used for typing purposes from libcst._nodes.base import CSTNode # noqa: F401 diff --git a/libcst/_nodes/module.py b/libcst/_nodes/module.py index 59a4507d..2bd26016 100644 --- a/libcst/_nodes/module.py +++ b/libcst/_nodes/module.py @@ -18,7 +18,6 @@ from libcst._nodes.whitespace import EmptyLine from libcst._removal_sentinel import RemovalSentinel from libcst._visitors import CSTVisitorT - if TYPE_CHECKING: # This is 
circular, so import the type only in type checking from libcst._parser.types.config import PartialParserConfig diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 6a831b85..5e8068ee 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -49,7 +49,6 @@ from libcst._nodes.whitespace import ( ) from libcst._visitors import CSTVisitorT - _INDENT_WHITESPACE_RE: Pattern[str] = re.compile(r"[ \f\t]+", re.UNICODE) diff --git a/libcst/_nodes/tests/test_cst_node.py b/libcst/_nodes/tests/test_cst_node.py index e3cb7e9a..15f0f7ab 100644 --- a/libcst/_nodes/tests/test_cst_node.py +++ b/libcst/_nodes/tests/test_cst_node.py @@ -12,7 +12,6 @@ from libcst._types import CSTNodeT from libcst._visitors import CSTTransformer from libcst.testing.utils import UnitTest, data_provider, none_throws - _EMPTY_SIMPLE_WHITESPACE = cst.SimpleWhitespace("") diff --git a/libcst/_nodes/whitespace.py b/libcst/_nodes/whitespace.py index 22182ebe..e16eea43 100644 --- a/libcst/_nodes/whitespace.py +++ b/libcst/_nodes/whitespace.py @@ -19,7 +19,6 @@ from libcst._nodes.internal import ( ) from libcst._visitors import CSTVisitorT - # SimpleWhitespace includes continuation characters, which must be followed immediately # by a newline. SimpleWhitespace does not include other kinds of newlines, because those # may have semantic significance. diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index dc7f75ee..35dafba4 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -36,7 +36,6 @@ from libcst._parser.parso.pgen2.generator import DFAState, Grammar, ReservedStri from libcst._parser.parso.python.token import TokenType from libcst._parser.types.token import Token - _NodeT = TypeVar("_NodeT") _TokenTypeT = TypeVar("_TokenTypeT", bound=TokenType) _TokenT = TypeVar("_TokenT", bound=Token) diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index aa84a4e1..59f2defa 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -121,7 +121,6 @@ from libcst._parser.types.partials import ( from libcst._parser.types.token import Token from libcst._parser.whitespace_parser import parse_parenthesizable_whitespace - BINOP_TOKEN_LUT: typing.Dict[str, typing.Type[BaseBinaryOp]] = { "*": Multiply, "@": MatrixMultiply, diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index 8ff7ac8f..ae0b1d17 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -101,7 +101,6 @@ from libcst._parser.whitespace_parser import ( parse_simple_whitespace, ) - AUGOP_TOKEN_LUT: Dict[str, Type[BaseAugOp]] = { "+=": AddAssign, "-=": SubtractAssign, diff --git a/libcst/_parser/custom_itertools.py b/libcst/_parser/custom_itertools.py index ccbb1a1f..867c934e 100644 --- a/libcst/_parser/custom_itertools.py +++ b/libcst/_parser/custom_itertools.py @@ -6,7 +6,6 @@ from itertools import zip_longest from typing import Iterable, Iterator, TypeVar - _T = TypeVar("_T") diff --git a/libcst/_parser/detect_config.py b/libcst/_parser/detect_config.py index 0209d0a2..ca13e7c2 100644 --- a/libcst/_parser/detect_config.py +++ b/libcst/_parser/detect_config.py @@ -18,7 +18,6 @@ from libcst._parser.types.config import AutoConfig, ParserConfig, PartialParserC from libcst._parser.types.token import Token from libcst._parser.wrapped_tokenize import tokenize_lines - _INDENT: TokenType = PythonTokenTypes.INDENT _NAME: TokenType 
= PythonTokenTypes.NAME _NEWLINE: TokenType = PythonTokenTypes.NEWLINE diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index 1a27e976..f9e78ad8 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -20,7 +20,6 @@ from libcst._parser.grammar import get_grammar, validate_grammar from libcst._parser.python_parser import PythonCSTParser from libcst._parser.types.config import PartialParserConfig - _DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig() diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index c8c24025..b86483c0 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -141,7 +141,6 @@ from libcst._parser.types.config import AutoConfig from libcst._parser.types.conversions import NonterminalConversion, TerminalConversion from libcst._parser.types.production import Production - # Keep this sorted alphabetically _TERMINAL_CONVERSIONS_SEQUENCE: Tuple[TerminalConversion, ...] = ( convert_DEDENT, diff --git a/libcst/_parser/parso/pgen2/generator.py b/libcst/_parser/parso/pgen2/generator.py index 546cc85f..4e20e89d 100644 --- a/libcst/_parser/parso/pgen2/generator.py +++ b/libcst/_parser/parso/pgen2/generator.py @@ -39,7 +39,6 @@ from typing import Any, Generic, Mapping, Sequence, Set, TypeVar, Union from libcst._parser.parso.pgen2.grammar_parser import GrammarParser, NFAState - _TokenTypeT = TypeVar("_TokenTypeT") diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 6b30c6d0..e816cd62 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -39,7 +39,6 @@ from typing import Dict, Generator, Iterable, Optional, Pattern, Set, Tuple from libcst._parser.parso.python.token import PythonTokenTypes from libcst._parser.parso.utils import PythonVersionInfo, split_lines - # Maximum code point of Unicode 6.0: 0x10ffff (1,114,111) MAX_UNICODE = "\U0010ffff" BOM_UTF8_STRING = BOM_UTF8.decode("utf-8") diff --git a/libcst/_parser/parso/tests/test_tokenize.py b/libcst/_parser/parso/tests/test_tokenize.py index f2c62d33..d0834ad7 100644 --- a/libcst/_parser/parso/tests/test_tokenize.py +++ b/libcst/_parser/parso/tests/test_tokenize.py @@ -22,7 +22,6 @@ from libcst._parser.parso.python.tokenize import PythonToken, tokenize from libcst._parser.parso.utils import parse_version_string, split_lines from libcst.testing.utils import UnitTest, data_provider - # To make it easier to access some of the token types, just put them here. NAME = PythonTokenTypes.NAME NEWLINE = PythonTokenTypes.NEWLINE diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py index 27b93731..0e4b4949 100644 --- a/libcst/_parser/parso/utils.py +++ b/libcst/_parser/parso/utils.py @@ -23,7 +23,6 @@ from ast import literal_eval from dataclasses import dataclass from typing import Optional, Sequence, Tuple, Union - # The following is a list in Python that are line breaks in str.splitlines, but # not in Python. In Python only \r (Carriage Return, 0xD) and \n (Line Feed, # 0xA) are allowed to split lines. 
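Aside from the workflow and requirements changes above, the bulk of this patch is the same mechanical edit repeated across the remaining files: one of the two blank lines that previously separated the import block from the first module-level statement is dropped, which appears to be the spacing that ufmt (usort + black) settles on. A minimal before/after sketch of that edit, using a made-up module purely for illustration:

```python
# Before: two blank lines followed the import block.
from typing import TypeVar


_T = TypeVar("_T")


# After `ufmt format .`: a single blank line separates the imports
# from the first module-level statement.
from typing import TypeVar

_T = TypeVar("_T")
```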
diff --git a/libcst/_parser/production_decorator.py b/libcst/_parser/production_decorator.py index c982bc8d..ffa0d038 100644 --- a/libcst/_parser/production_decorator.py +++ b/libcst/_parser/production_decorator.py @@ -8,7 +8,6 @@ from typing import Callable, Optional, Sequence, TypeVar from libcst._parser.types.conversions import NonterminalConversion from libcst._parser.types.production import Production - _NonterminalConversionT = TypeVar( "_NonterminalConversionT", bound=NonterminalConversion ) diff --git a/libcst/_parser/tests/test_whitespace_parser.py b/libcst/_parser/tests/test_whitespace_parser.py index dcbafa7e..17996b47 100644 --- a/libcst/_parser/tests/test_whitespace_parser.py +++ b/libcst/_parser/tests/test_whitespace_parser.py @@ -17,7 +17,6 @@ from libcst._parser.whitespace_parser import ( ) from libcst.testing.utils import UnitTest, data_provider - _T = TypeVar("_T") diff --git a/libcst/_parser/tests/test_wrapped_tokenize.py b/libcst/_parser/tests/test_wrapped_tokenize.py index 56bf3dbd..e131f620 100644 --- a/libcst/_parser/tests/test_wrapped_tokenize.py +++ b/libcst/_parser/tests/test_wrapped_tokenize.py @@ -13,7 +13,6 @@ from libcst._parser.types.whitespace_state import WhitespaceState from libcst._parser.wrapped_tokenize import Token, tokenize from libcst.testing.utils import UnitTest, data_provider - _PY38 = parse_version_string("3.8.0") _PY37 = parse_version_string("3.7.0") _PY36 = parse_version_string("3.6.0") diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index 7c76e4c7..13778b2a 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -16,7 +16,6 @@ from libcst._add_slots import add_slots from libcst._nodes.whitespace import NEWLINE_RE from libcst._parser.parso.utils import PythonVersionInfo, parse_version_string - _INDENT_RE: Pattern[str] = re.compile(r"[ \t]+") diff --git a/libcst/_parser/types/conversions.py b/libcst/_parser/types/conversions.py index d0193624..9951e6da 100644 --- a/libcst/_parser/types/conversions.py +++ b/libcst/_parser/types/conversions.py @@ -8,7 +8,6 @@ from typing import Any, Callable, Sequence from libcst._parser.types.config import ParserConfig from libcst._parser.types.token import Token - # pyre-fixme[33]: Aliased annotation cannot contain `Any`. NonterminalConversion = Callable[[ParserConfig, Sequence[Any]], Any] # pyre-fixme[33]: Aliased annotation cannot contain `Any`. 
diff --git a/libcst/_parser/types/partials.py b/libcst/_parser/types/partials.py index a53f3778..abd35071 100644 --- a/libcst/_parser/types/partials.py +++ b/libcst/_parser/types/partials.py @@ -29,7 +29,6 @@ from libcst._nodes.statement import AsName, BaseSmallStatement, Decorator, Impor from libcst._nodes.whitespace import EmptyLine, SimpleWhitespace, TrailingWhitespace from libcst._parser.types.whitespace_state import WhitespaceState - _T = TypeVar("_T") diff --git a/libcst/_parser/whitespace_parser.py b/libcst/_parser/whitespace_parser.py index b9df6c7e..27892a27 100644 --- a/libcst/_parser/whitespace_parser.py +++ b/libcst/_parser/whitespace_parser.py @@ -31,7 +31,6 @@ from libcst._nodes.whitespace import ( from libcst._parser.types.config import BaseWhitespaceParserConfig from libcst._parser.types.whitespace_state import WhitespaceState as State - # BEGIN PARSER ENTRYPOINTS diff --git a/libcst/_parser/wrapped_tokenize.py b/libcst/_parser/wrapped_tokenize.py index 6104757d..d77ed68c 100644 --- a/libcst/_parser/wrapped_tokenize.py +++ b/libcst/_parser/wrapped_tokenize.py @@ -35,7 +35,6 @@ from libcst._parser.parso.utils import PythonVersionInfo, split_lines from libcst._parser.types.token import Token from libcst._parser.types.whitespace_state import WhitespaceState - _ERRORTOKEN: TokenType = PythonTokenTypes.ERRORTOKEN _ERROR_DEDENT: TokenType = PythonTokenTypes.ERROR_DEDENT diff --git a/libcst/_position.py b/libcst/_position.py index 82411402..be99d4d3 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -17,7 +17,6 @@ from typing import Tuple, Union, cast, overload from libcst._add_slots import add_slots - _CodePositionT = Union[Tuple[int, int], "CodePosition"] diff --git a/libcst/_type_enforce.py b/libcst/_type_enforce.py index 12cb423f..92f158d3 100644 --- a/libcst/_type_enforce.py +++ b/libcst/_type_enforce.py @@ -8,7 +8,6 @@ from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Tupl from typing_extensions import Literal from typing_inspect import get_args, get_origin, is_classvar, is_typevar, is_union_type - try: # py37+ from typing import ForwardRef except ImportError: # py36 diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 0246c718..f536ca3a 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -5,7 +5,7 @@ # This file was generated by libcst.codegen.gen_matcher_classes -from typing import TYPE_CHECKING, Optional, Union +from typing import Optional, Union, TYPE_CHECKING from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel @@ -28,10 +28,10 @@ if TYPE_CHECKING: BinaryOperation, BooleanOperation, Call, - Comparison, - ComparisonTarget, CompFor, CompIf, + Comparison, + ComparisonTarget, ConcatenatedString, Dict, DictComp, @@ -57,9 +57,9 @@ if TYPE_CHECKING: Name, NamedExpr, Param, - Parameters, ParamSlash, ParamStar, + Parameters, RightCurlyBrace, RightParen, RightSquareBracket, diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index 8525b050..41b115c1 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -5,7 +5,6 @@ from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast - if TYPE_CHECKING: from libcst._typed_visitor import CSTTypedBaseFunctions # noqa: F401 diff --git a/libcst/_types.py b/libcst/_types.py index b6b2ea9c..8c5c380f 100644 --- a/libcst/_types.py +++ b/libcst/_types.py @@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, TypeVar - if TYPE_CHECKING: from libcst._nodes.base import CSTNode # 
noqa: F401 diff --git a/libcst/_visitors.py b/libcst/_visitors.py index 8da37dbf..6bb8557b 100644 --- a/libcst/_visitors.py +++ b/libcst/_visitors.py @@ -11,7 +11,6 @@ from libcst._removal_sentinel import RemovalSentinel from libcst._typed_visitor import CSTTypedTransformerFunctions, CSTTypedVisitorFunctions from libcst._types import CSTNodeT - if TYPE_CHECKING: # Circular dependency for typing reasons only from libcst._nodes.base import CSTNode # noqa: F401 diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 7bb7120d..b3475570 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -11,7 +11,6 @@ import libcst as cst from libcst import ensure_type, parse_expression from libcst.codegen.gather import all_libcst_nodes, typeclasses - CST_DIR: Set[str] = set(dir(cst)) CLASS_RE = r"" OPTIONAL_RE = r"typing\.Union\[([^,]*?), NoneType]" diff --git a/libcst/codegen/gen_type_mapping.py b/libcst/codegen/gen_type_mapping.py index 2f6b2a9d..0f229e06 100644 --- a/libcst/codegen/gen_type_mapping.py +++ b/libcst/codegen/gen_type_mapping.py @@ -7,7 +7,6 @@ from typing import List from libcst.codegen.gather import imports, nodebases, nodeuses - generated_code: List[str] = [] generated_code.append("# Copyright (c) Facebook, Inc. and its affiliates.") generated_code.append("#") diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index 0666691b..07bcf95a 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -8,7 +8,6 @@ from typing import List from libcst.codegen.gather import imports, nodebases, nodeuses - generated_code: List[str] = [] generated_code.append("# Copyright (c) Facebook, Inc. and its affiliates.") generated_code.append("#") diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index 60a952f2..6779c17b 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -26,8 +26,7 @@ from libcst.codegen.transforms import ( def format_file(fname: str) -> None: with open(os.devnull, "w") as devnull: - subprocess.check_call(["isort", "-q", fname], stdout=devnull, stderr=devnull) - subprocess.check_call(["black", fname], stdout=devnull, stderr=devnull) + subprocess.check_call(["ufmt", "format", fname], stdout=devnull, stderr=devnull) def clean_generated_code(code: str) -> str: diff --git a/libcst/codemod/__init__.py b/libcst/codemod/__init__.py index b2b2feab..8aec421a 100644 --- a/libcst/codemod/__init__.py +++ b/libcst/codemod/__init__.py @@ -30,7 +30,6 @@ from libcst.codemod._runner import ( from libcst.codemod._testing import CodemodTest from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor - __all__ = [ "Codemod", "CodemodContext", diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index e4b7ec45..1dff878f 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -34,7 +34,6 @@ from libcst.codemod._runner import ( ) from libcst.metadata import FullRepoManager - _DEFAULT_GENERATED_CODE_MARKER: str = f"@gen{''}erated" diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index 1a11e91e..0196eb27 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -15,7 +15,6 @@ from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._remove_imports import RemoveImportsVisitor - _Codemod = TypeVar("_Codemod", bound=Codemod) diff --git 
a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py index 922037dd..f80e06de 100644 --- a/libcst/codemod/_dummy_pool.py +++ b/libcst/codemod/_dummy_pool.py @@ -6,7 +6,6 @@ from types import TracebackType from typing import Callable, Generator, Iterable, Optional, Type, TypeVar - RetT = TypeVar("RetT") ArgT = TypeVar("ArgT") diff --git a/libcst/codemod/_runner.py b/libcst/codemod/_runner.py index a4b68dd4..1748bd9e 100644 --- a/libcst/codemod/_runner.py +++ b/libcst/codemod/_runner.py @@ -16,7 +16,6 @@ from typing import Optional, Sequence, Union from libcst import PartialParserConfig, parse_module from libcst.codemod._codemod import Codemod - # All datastructures defined in this class are pickleable so that they can be used # as a return value with the multiprocessing module. diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index 35032719..2190f808 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -10,7 +10,6 @@ import libcst as cst import libcst.matchers as m from libcst.codemod import VisitorBasedCodemodCommand - USE_FSTRING_SIMPLE_EXPRESSION_MAX_LENGTH = 30 diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index 741f9a46..b11e44c0 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -12,7 +12,6 @@ from libcst.codemod.visitors import GatherCommentsVisitor, RemoveImportsVisitor from libcst.helpers import get_absolute_module_for_import from libcst.metadata import PositionProvider, ProviderT - DEFAULT_SUPPRESS_COMMENT_REGEX = ( r".*\W(noqa|lint-ignore: ?unused-import|lint-ignore: ?F401)(\W.*)?$" ) diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index bcc570be..12f36f65 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -14,7 +14,6 @@ from libcst.codemod.visitors._gather_string_annotation_names import ( from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor from libcst.codemod.visitors._remove_imports import RemoveImportsVisitor - __all__ = [ "AddImportsVisitor", "ApplyTypeAnnotationsVisitor", diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py index c3d62445..244646f9 100644 --- a/libcst/codemod/visitors/_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -11,7 +11,6 @@ from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor from libcst.metadata import MetadataWrapper, QualifiedNameProvider - FUNCS_CONSIDERED_AS_STRING_ANNOTATIONS = {"typing.TypeVar"} diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index 82860325..89f37844 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -17,7 +17,6 @@ from libcst.codemod.visitors._gather_string_annotation_names import ( from libcst.metadata import ProviderT, ScopeProvider from libcst.metadata.scope_provider import _gen_dotted_names - MODULES_IGNORED_BY_DEFAULT = {"__future__"} diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index 77c4389c..4621c58f 100644 --- a/libcst/helpers/__init__.py 
+++ b/libcst/helpers/__init__.py @@ -20,7 +20,6 @@ from libcst.helpers.expression import ( ) from libcst.helpers.module import insert_header_comments - __all__ = [ "get_absolute_module_for_import", "get_absolute_module_for_import_or_raise", diff --git a/libcst/helpers/_template.py b/libcst/helpers/_template.py index b1be6e5c..d1f02252 100644 --- a/libcst/helpers/_template.py +++ b/libcst/helpers/_template.py @@ -9,7 +9,6 @@ from typing import Dict, Mapping, Optional, Set, Union import libcst as cst from libcst.helpers.common import ensure_type - TEMPLATE_PREFIX: str = "__LIBCST_MANGLED_NAME_" TEMPLATE_SUFFIX: str = "_EMAN_DELGNAM_TSCBIL__" diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 8b7ba63f..907c6ff3 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -11,25 +11,25 @@ from typing import Callable, Optional, Sequence, Union from typing_extensions import Literal import libcst as cst -from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit +from libcst.matchers._decorators import call_if_inside, call_if_not_inside, visit, leave from libcst.matchers._matcher_base import ( AbstractBaseMatcherNodeMeta, - AllOf, - AtLeastN, - AtMostN, BaseMatcherNode, - DoesNotMatch, - DoNotCare, DoNotCareSentinel, + DoNotCare, + TypeOf, + OneOf, + AllOf, + DoesNotMatch, MatchIfTrue, + MatchRegex, MatchMetadata, MatchMetadataIfTrue, - MatchRegex, - OneOf, - SaveMatchedNode, - TypeOf, ZeroOrMore, + AtLeastN, ZeroOrOne, + AtMostN, + SaveMatchedNode, extract, extractall, findall, diff --git a/libcst/matchers/_decorators.py b/libcst/matchers/_decorators.py index 7dd2e741..b6b236cc 100644 --- a/libcst/matchers/_decorators.py +++ b/libcst/matchers/_decorators.py @@ -7,7 +7,6 @@ from typing import Callable, TypeVar from libcst.matchers._matcher_base import BaseMatcherNode - _CSTVisitFuncT = TypeVar("_CSTVisitFuncT") diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index d8a22986..0a9152fe 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -23,10 +23,10 @@ from libcst._nodes.expression import ( BinaryOperation, BooleanOperation, Call, - Comparison, - ComparisonTarget, CompFor, CompIf, + Comparison, + ComparisonTarget, ConcatenatedString, Dict, DictComp, @@ -52,9 +52,9 @@ from libcst._nodes.expression import ( Name, NamedExpr, Param, - Parameters, ParamSlash, ParamStar, + Parameters, RightCurlyBrace, RightParen, RightSquareBracket, diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index be50edfd..0c3b65a4 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -45,7 +45,6 @@ from libcst.matchers._matcher_base import ( ) from libcst.matchers._return_types import TYPED_FUNCTION_RETURN_MAPPING - CONCRETE_METHODS: Set[str] = { *{f"visit_{cls.__name__}" for cls in TYPED_FUNCTION_RETURN_MAPPING}, *{f"leave_{cls.__name__}" for cls in TYPED_FUNCTION_RETURN_MAPPING}, diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index bada687e..55a48cb5 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -50,7 +50,6 @@ from libcst.metadata.span_provider import ByteSpanPositionProvider, CodeSpan from libcst.metadata.type_inference_provider import TypeInferenceProvider from libcst.metadata.wrapper import MetadataWrapper - __all__ = [ "CodePosition", "CodeRange", diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index b2910f5c..24949b53 100644 --- 
a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -26,7 +26,6 @@ from libcst._metadata_dependent import ( ) from libcst._visitors import CSTVisitor - if TYPE_CHECKING: from libcst._nodes.base import CSTNode from libcst._nodes.module import Module, _ModuleSelfT as _ModuleT diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index 0baf5b58..cec74e7f 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -10,7 +10,6 @@ from typing import TYPE_CHECKING, Collection, Dict, List, Mapping import libcst as cst from libcst.metadata.wrapper import MetadataWrapper - if TYPE_CHECKING: from libcst.metadata.base_provider import ProviderT # noqa: F401 diff --git a/libcst/metadata/position_provider.py b/libcst/metadata/position_provider.py index 7bd1e713..004487b3 100644 --- a/libcst/metadata/position_provider.py +++ b/libcst/metadata/position_provider.py @@ -16,7 +16,6 @@ from libcst._nodes.module import Module from libcst._position import CodePosition, CodeRange from libcst.metadata.base_provider import BaseMetadataProvider - NEWLINE_RE: Pattern[str] = re.compile(r"\r\n?|\n") diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 3b5d380e..821855c9 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -34,7 +34,6 @@ from libcst.metadata.expression_context_provider import ( ExpressionContextProvider, ) - # Comprehensions are handled separately in _visit_comp_alike due to # the complexity of the semantics _ASSIGNMENT_LIKE_NODES = ( diff --git a/libcst/metadata/tests/test_full_repo_manager.py b/libcst/metadata/tests/test_full_repo_manager.py index ff7ebfd0..c7a458d1 100644 --- a/libcst/metadata/tests/test_full_repo_manager.py +++ b/libcst/metadata/tests/test_full_repo_manager.py @@ -12,7 +12,6 @@ from libcst.metadata.tests.test_type_inference_provider import _test_simple_clas from libcst.metadata.type_inference_provider import TypeInferenceProvider from libcst.testing.utils import UnitTest - REPO_ROOT_DIR: str = str(Path(__file__).parent.parent.parent.resolve()) diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index 6c31b17f..9ff9b4a0 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -25,7 +25,6 @@ from libcst._batched_visitor import BatchableCSTVisitor, VisitorMethod, visit_ba from libcst._exceptions import MetadataException from libcst.metadata.base_provider import BatchableMetadataProvider - if TYPE_CHECKING: from libcst._nodes.base import CSTNode # noqa: F401 from libcst._nodes.module import Module # noqa: F401 diff --git a/libcst/testing/utils.py b/libcst/testing/utils.py index cba70ed1..5a1e2023 100644 --- a/libcst/testing/utils.py +++ b/libcst/testing/utils.py @@ -22,7 +22,6 @@ from typing import ( ) from unittest import TestCase - DATA_PROVIDER_DATA_ATTR_NAME = "__data_provider_data" DATA_PROVIDER_DESCRIPTION_PREFIX = "_data_provider_" PROVIDER_TEST_LIMIT_ATTR_NAME = "__provider_test_limit" diff --git a/libcst/tests/test_fuzz.py b/libcst/tests/test_fuzz.py index 590449c7..66b32276 100644 --- a/libcst/tests/test_fuzz.py +++ b/libcst/tests/test_fuzz.py @@ -20,7 +20,6 @@ from hypothesmith import from_grammar import libcst - # If in doubt, you should use these "unit test" settings. They tune the timeouts # and example-reproduction behaviour for these tests' unusually large inputs. 
hypothesis.settings.register_profile( diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 6192dcff..98675787 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -17,7 +17,6 @@ from libcst.metadata.type_inference_provider import ( ) from libcst.testing.utils import UnitTest, data_provider - TEST_SUITE_PATH: Path = Path(__file__).parent / "pyre" diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index edc283e5..aa417156 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -28,7 +28,6 @@ from typing_extensions import Literal from libcst._type_enforce import is_value_of_type from libcst.testing.utils import UnitTest, data_provider - if TYPE_CHECKING: from collections import Counter # noqa: F401 diff --git a/libcst/tool.py b/libcst/tool.py index fff190c2..44fd367a 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -40,7 +40,6 @@ from libcst.codemod import ( parallel_exec_transform_with_prettyprint, ) - _DEFAULT_INDENT: str = " " diff --git a/pyproject.toml b/pyproject.toml index c55f8d4e..c9a93f4d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,2 @@ [tool.black] target-version = ["py36"] - -[tool.isort] -line_length = 88 -multi_line_output = 3 -include_trailing_comma = true -force_grid_wrap = 0 -lines_after_imports = 2 -combine_as_imports = true diff --git a/requirements-dev.txt b/requirements-dev.txt index 0dd8f54d..5efb0607 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,14 +2,15 @@ black==20.8b1 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8 +git+https://github.com/jimmylai/sphinx.git@slots_type_annotation hypothesis>=4.36.0 hypothesmith>=0.0.4 -git+https://github.com/jimmylai/sphinx.git@slots_type_annotation -isort==5.5.3 jupyter>=1.0.0 nbsphinx>=0.4.2 -pyre-check==0.0.41 -sphinx-rtd-theme>=0.4.3 prompt-toolkit>=2.0.9 -tox>=3.18.1 +pyre-check==0.0.41 setuptools_scm>=6.0.1 +sphinx-rtd-theme>=0.4.3 +tox>=3.18.1 +ufmt==1.2 +usort==0.6.3 diff --git a/setup.py b/setup.py index f554140d..28a404ac 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,6 @@ from os import path import setuptools - # Grab the readme so that our package stays in sync with github. this_directory: str = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.rst"), encoding="utf-8") as f: diff --git a/stubs/tokenize.pyi b/stubs/tokenize.pyi index 284dbf36..2db19c04 100644 --- a/stubs/tokenize.pyi +++ b/stubs/tokenize.pyi @@ -66,7 +66,6 @@ from token import ( ) from typing import Callable, Generator, Sequence, Tuple - Hexnumber: str = ... Binnumber: str = ... Octnumber: str = ... 
diff --git a/tox.ini b/tox.ini index 8be5e9b2..1c205721 100644 --- a/tox.ini +++ b/tox.ini @@ -20,8 +20,7 @@ commands = [testenv:lint] commands = flake8 {posargs} - isort --check-only {posargs:.} - black --check {posargs:libcst/} + ufmt check {posargs:.} python3 -m fixit.cli.run_rules [testenv:docs] @@ -31,8 +30,7 @@ commands = [testenv:autofix] commands = flake8 {posargs} - isort -q {posargs:.} - black {posargs:libcst/} + ufmt format {posargs:.} python3 -m fixit.cli.apply_fix [testenv:pyre] From 96a0b53bfed82adc8f12ec23078b3d1ef6ddad81 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 26 Aug 2021 08:31:38 -0400 Subject: [PATCH 129/632] Extract common utilities in ApplyTypeAnnotationsVisitor tests (#514) All of our tests follow one of two patterns: either populate a context and transform using the default behavior, or test when setting flags in either the context population and transform steps (and verify that the behavior is the same in both cases). So, extract these two patterns into helper functions. This improves readability of the existing code a bit, and will be even more helpful if we split apart the monster test `test_annotate_functions` (which I would like to do soon - the list of test cases is so big that it's hard to jump to the relevant section when trying to verify behaviors). --- .../tests/test_apply_type_annotations.py | 98 ++++++++++--------- 1 file changed, 52 insertions(+), 46 deletions(-) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 16ccda99..eaaeb1c7 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -7,7 +7,7 @@ import sys import textwrap import unittest -from typing import Type +from typing import Dict, Type from libcst import parse_module from libcst.codemod import Codemod, CodemodContext, CodemodTest @@ -18,6 +18,41 @@ from libcst.testing.utils import data_provider class TestApplyAnnotationsVisitor(CodemodTest): TRANSFORM: Type[Codemod] = ApplyTypeAnnotationsVisitor + def run_simple_test_case( + self, + stub: str, + before: str, + after: str, + ) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + self.assertCodemod(before, after, context_override=context) + + def run_test_case_with_flags( + self, + stub: str, + before: str, + after: str, + **kwargs: Dict[str, bool], + ) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + # Test setting the flag on the codemod instance. + self.assertCodemod(before, after, context_override=context, **kwargs) + + # Test setting the flag when storing the stub in the context. 
+ context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, + parse_module(textwrap.dedent(stub.rstrip())), + **kwargs, + ) + self.assertCodemod(before, after, context_override=context) + @data_provider( ( ( @@ -82,11 +117,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) ) def test_merge_module_imports(self, stub: str, before: str, after: str) -> None: - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - self.assertCodemod(before, after, context_override=context) + self.run_simple_test_case(stub=stub, before=before, after=after) @data_provider( ( @@ -751,11 +782,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) ) def test_annotate_functions(self, stub: str, before: str, after: str) -> None: - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - self.assertCodemod(before, after, context_override=context) + self.run_simple_test_case(stub=stub, before=before, after=after) @data_provider( ( @@ -801,11 +828,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) @unittest.skipIf(sys.version_info < (3, 8), "Unsupported Python version") def test_annotate_functions_py38(self, stub: str, before: str, after: str) -> None: - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - self.assertCodemod(before, after, context_override=context) + self.run_simple_test_case(stub=stub, before=before, after=after) @data_provider( ( @@ -827,23 +850,12 @@ class TestApplyAnnotationsVisitor(CodemodTest): def test_annotate_functions_with_existing_annotations( self, stub: str, before: str, after: str ) -> None: - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - # Test setting the overwrite flag on the codemod instance. - self.assertCodemod( - before, after, context_override=context, overwrite_existing_annotations=True - ) - - # Test setting the flag when storing the stub in the context. - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, - parse_module(textwrap.dedent(stub.rstrip())), + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, overwrite_existing_annotations=True, ) - self.assertCodemod(before, after, context_override=context) @data_provider( ( @@ -908,20 +920,14 @@ class TestApplyAnnotationsVisitor(CodemodTest): def test_annotate_using_incomplete_stubs( self, stub: str, before: str, after: str ) -> None: - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - # Test setting the overwrite flag on the codemod instance. - self.assertCodemod( - before, after, context_override=context, overwrite_existing_annotations=True - ) - - # Test setting the flag when storing the stub in the context. - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, - parse_module(textwrap.dedent(stub.rstrip())), + """ + Ensure that when the stubs are missing annotations where the existing + code has them, we won't remove the existing annotations even when + `overwrite_existing_annotations` is set to `True`. 
+ """ + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, overwrite_existing_annotations=True, ) - self.assertCodemod(before, after, context_override=context) From 2e6fb89fccd20a53ad6837d88ea9844433be7e77 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 27 Aug 2021 04:06:35 -0400 Subject: [PATCH 130/632] Organize the _apply_type_annotations visitor tests (#517) --- .../tests/test_apply_type_annotations.py | 930 +++++++++--------- 1 file changed, 473 insertions(+), 457 deletions(-) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index eaaeb1c7..ebbc827a 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -54,8 +54,8 @@ class TestApplyAnnotationsVisitor(CodemodTest): self.assertCodemod(before, after, context_override=context) @data_provider( - ( - ( + { + "supported_cases": ( """ from __future__ import annotations from foo import Foo @@ -72,56 +72,101 @@ class TestApplyAnnotationsVisitor(CodemodTest): from baz import Baz """, ), - ( - # Missing feature: ignore aliased imports + "unsupported_cases": ( """ from Foo import foo as bar - """, - """ - from Foo import bar - """, - """ - from Foo import bar - """, - ), - ( - # Missing feature: ignore bare imports - """ import foo + from .. import baz + from boo import * """, """ """, + # This is a bug, it would be better to just ignor aliased + # imports than to add them incorrectly. """ + from Foo import bar """, ), - ( - # Missing feature: ignore relative imports - """ - from .. import foo - """, - """ - """, - """ - """, - ), - ( - # Missing feature: ignore star imports - """ - from foo import * - """, - """ - """, - """ - """, - ), - ) + } ) def test_merge_module_imports(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) @data_provider( - ( - ( + { + "simple": ( + """ + bar: int = ... + """, + """ + bar = foo() + """, + """ + bar: int = foo() + """, + ), + "simple_with_existing": ( + """ + bar: int = ... + """, + """ + bar: str = foo() + """, + """ + bar: str = foo() + """, + ), + "with_separate_declaration": ( + """ + x: int = ... + y: int = ... + z: int = ... + """, + """ + x = y = z = 1 + """, + """ + x: int + y: int + z: int + + x = y = z = 1 + """, + ), + "needs_added_import": ( + """ + FOO: a.b.Example = ... + """, + """ + FOO = bar() + """, + """ + from a.b import Example + + FOO: Example = bar() + """, + ), + "with_generic": ( + """ + FOO: Union[a.b.Example, int] = ... + """, + """ + FOO = bar() + """, + """ + from a.b import Example + + FOO: Union[Example, int] = bar() + """, + ), + } + ) + def test_annotate_globals(self, stub: str, before: str, after: str) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) + + @data_provider( + { + "basic_return": ( """ def foo() -> int: ... """, @@ -134,7 +179,33 @@ class TestApplyAnnotationsVisitor(CodemodTest): return 1 """, ), - ( + "return_with_existing_param": ( + """ + def foo(x: int) -> str: ... + """, + """ + def foo(x: str): + pass + """, + """ + def foo(x: str) -> str: + pass + """, + ), + "param_with_existng_return": ( + """ + def foo(x: int) -> int: ... + """, + """ + def foo(x) -> int: + return x + """, + """ + def foo(x: int) -> int: + return x + """, + ), + "return_and_params_general": ( """ def foo( b: str, c: int = ..., *, d: str = ..., e: int, f: int = ... 
@@ -153,7 +224,22 @@ class TestApplyAnnotationsVisitor(CodemodTest): return 1 """, ), - ( + "with_import__basic": ( + """ + def foo() -> bar.Baz: ... + """, + """ + def foo(): + return returns_baz() + """, + """ + from bar import Baz + + def foo() -> Baz: + return returns_baz() + """, + ), + "with_import__unneeded_explicit": ( """ import bar @@ -171,10 +257,8 @@ class TestApplyAnnotationsVisitor(CodemodTest): """, ), # Keep the existing `import A` instead of using `from A import B`. - ( + "with_import__preexisting": ( """ - import bar - def foo() -> bar.Baz: ... """, """ @@ -190,145 +274,23 @@ class TestApplyAnnotationsVisitor(CodemodTest): return returns_baz() """, ), - ( + "with_nested_import": ( """ - def foo() -> int: ... + def foo(x: django.http.response.HttpResponse) -> str: + pass + """, + """ + def foo(x) -> str: + pass + """, + """ + from django.http.response import HttpResponse - class A: - def foo() -> str: ... - """, - """ - def foo(): - return 1 - class A: - def foo(): - return '' - """, - """ - def foo() -> int: - return 1 - class A: - def foo() -> str: - return '' + def foo(x: HttpResponse) -> str: + pass """, ), - ( - """ - bar: int = ... - """, - """ - bar = foo() - """, - """ - bar: int = foo() - """, - ), - ( - """ - bar: int = ... - """, - """ - bar: str = foo() - """, - """ - bar: str = foo() - """, - ), - ( - """ - bar: int = ... - class A: - bar: str = ... - """, - """ - bar = foo() - class A: - bar = foobar() - """, - """ - bar: int = foo() - class A: - bar: str = foobar() - """, - ), - ( - """ - bar: int = ... - class A: - bar: str = ... - """, - """ - bar = foo() - class A: - bar = foobar() - """, - """ - bar: int = foo() - class A: - bar: str = foobar() - """, - ), - ( - """ - a: int = ... - b: str = ... - """, - """ - def foo() -> Tuple[int, str]: - return (1, "") - - a, b = foo() - """, - """ - a: int - b: str - - def foo() -> Tuple[int, str]: - return (1, "") - - a, b = foo() - """, - ), - ( - """ - a: int = ... - b: str = ... - """, - """ - def foo() -> Tuple[int, str]: - return (1, "") - - [a, b] = foo() - """, - """ - a: int - b: str - - def foo() -> Tuple[int, str]: - return (1, "") - - [a, b] = foo() - """, - ), - ( - """ - x: int = ... - y: int = ... - z: int = ... - """, - """ - x = y = z = 1 - """, - """ - x: int - y: int - z: int - - x = y = z = 1 - """, - ), - # Don't add annotations if one is already present - ( + "no_override_existing": ( """ def foo(x: int = 1) -> List[str]: ... """, @@ -345,7 +307,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): return [''] """, ), - ( + "with_typing_import__basic": ( """ from typing import List @@ -362,7 +324,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): return [1] """, ), - ( + "with_typing_import__add_to_preexisting_line": ( """ from typing import List @@ -381,139 +343,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): return [1] """, ), - ( - """ - a: Dict[str, int] = ... - """, - """ - def foo() -> int: - return 1 - a = {} - a['x'] = foo() - """, - """ - def foo() -> int: - return 1 - a: Dict[str, int] = {} - a['x'] = foo() - """, - ), - # Test that tuples with subscripts are handled correctly - # and top level annotations are added in the correct place - ( - """ - a: int = ... 
- """, - """ - from typing import Tuple - - def foo() -> Tuple[str, int]: - return "", 1 - - b['z'], a = foo() - """, - """ - from typing import Tuple - a: int - - def foo() -> Tuple[str, int]: - return "", 1 - - b['z'], a = foo() - """, - ), - # Don't override existing default parameter values - ( - """ - class B: - def foo(self, x: int = a.b.A.__add__(1), y=None) -> int: ... - """, - """ - class B: - def foo(self, x = A + 1, y = None) -> int: - return x - - """, - """ - class B: - def foo(self, x: int = A + 1, y = None) -> int: - return x - """, - ), - ( - """ - def foo(x: int) -> int: ... - """, - """ - def foo(x) -> int: - return x - """, - """ - def foo(x: int) -> int: - return x - """, - ), - ( - """ - async def a(r: Request, z=None) -> django.http.response.HttpResponse: ... - async def b(r: Request, z=None) -> django.http.response.HttpResponse: ... - async def c(r: Request, z=None) -> django.http.response.HttpResponse: ... - """, - """ - async def a(r: Request, z=None): ... - async def b(r: Request, z=None): ... - async def c(r: Request, z=None): ... - """, - """ - from django.http.response import HttpResponse - - async def a(r: Request, z=None) -> HttpResponse: ... - async def b(r: Request, z=None) -> HttpResponse: ... - async def c(r: Request, z=None) -> HttpResponse: ... - """, - ), - ( - """ - FOO: a.b.Example = ... - """, - """ - FOO = bar() - """, - """ - from a.b import Example - - FOO: Example = bar() - """, - ), - ( - """ - FOO: Union[a.b.Example, int] = ... - """, - """ - FOO = bar() - """, - """ - from a.b import Example - - FOO: Union[Example, int] = bar() - """, - ), - ( - """ - def foo(x: int) -> List[Union[a.b.Example, str]]: ... - """, - """ - def foo(x: int): - return [barfoo(), ""] - """, - """ - from a.b import Example - - def foo(x: int) -> List[Union[Example, str]]: - return [barfoo(), ""] - """, - ), - ( + "add_imports_for_nested_types": ( """ def foo(x: int) -> Optional[a.b.Example]: ... """, @@ -528,20 +358,35 @@ class TestApplyAnnotationsVisitor(CodemodTest): pass """, ), - ( + "UNSUPPORTED_add_imports_for_generics": ( """ - def foo(x: int) -> str: ... + def foo(x: int) -> typing.Optional[Example]: ... """, """ - def foo(x: str): + def foo(x: int): pass """, """ - def foo(x: str) -> str: + def foo(x: int) -> typing.Optional[Example]: pass """, ), - ( + "add_imports_for_doubly_nested_types": ( + """ + def foo(x: int) -> List[Union[a.b.Example, str]]: ... + """, + """ + def foo(x: int): + return [barfoo(), ""] + """, + """ + from a.b import Example + + def foo(x: int) -> List[Union[Example, str]]: + return [barfoo(), ""] + """, + ), + "deeply_nested_example_with_multiline_annotation": ( """ def foo(x: int)-> Union[ Coroutine[Any, Any, django.http.response.HttpResponse], str @@ -561,41 +406,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): pass """, ), - ( - """ - def foo(x: django.http.response.HttpResponse) -> str: - pass - """, - """ - def foo(x) -> str: - pass - """, - """ - from django.http.response import HttpResponse - - def foo(x: HttpResponse) -> str: - pass - """, - ), - ( - """ - def foo() -> b.b.A: ... - """, - """ - from c import A as B, bar - - def foo(): - return bar() - """, - """ - from c import A as B, bar - from b.b import A - - def foo() -> A: - return bar() - """, - ), - ( + "do_not_add_imports_inside_of_Type": ( """ from typing import Type @@ -617,7 +428,66 @@ class TestApplyAnnotationsVisitor(CodemodTest): return A """, ), - ( + "with_async": ( + """ + async def a(r: Request, z=None) -> django.http.response.HttpResponse: ... 
+ async def b(r: Request, z=None) -> django.http.response.HttpResponse: ... + async def c(r: Request, z=None) -> django.http.response.HttpResponse: ... + """, + """ + async def a(r: Request, z=None): ... + async def b(r: Request, z=None): ... + async def c(r: Request, z=None): ... + """, + """ + from django.http.response import HttpResponse + + async def a(r: Request, z=None) -> HttpResponse: ... + async def b(r: Request, z=None) -> HttpResponse: ... + async def c(r: Request, z=None) -> HttpResponse: ... + """, + ), + "async_with_decorators": ( + """ + def async_with_decorators(a: bool, b: bool) -> str: ... + """, + """ + @second_decorator + @first_decorator(5) + async def async_with_decorators(a, b): + return "hello" + """, + """ + @second_decorator + @first_decorator(5) + async def async_with_decorators(a: bool, b: bool) -> str: + return "hello" + """, + ), + # test cases named with the REQUIRES_PREEXISTING prefix are verifying + # that certain special cases work if the stub and the existing code + # happen to align well, but none of these cases are guaranteed to work + # in general - for example duplicate type names will generally result in + # incorrect codemod. + "REQURIES_PREEXISTING_new_import_okay_if_existing_aliased": ( + """ + def foo() -> b.b.A: ... + """, + """ + from c import A as B, bar + + def foo(): + return bar() + """, + """ + from c import A as B, bar + from b.b import A + + def foo() -> A: + return bar() + """, + ), + "REQUIRES_PREEXISTING_fully_qualified_with_alias": ( """ def foo() -> db.Connection: ... """, @@ -632,7 +502,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): return db.Connection() """, ), - ( + "REQURIRES_PREEXISTING_fully_qualified_typing": ( """ def foo() -> typing.Sequence[int]: ... """, @@ -647,69 +517,114 @@ class TestApplyAnnotationsVisitor(CodemodTest): return [] """, ), - # Insert a TypedDict class that is not in the source file. - ( - """ - from mypy_extensions import TypedDict + } + ) + def test_annotate_simple_functions( + self, stub: str, before: str, after: str + ) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) - class MovieTypedDict(TypedDict): - name: str - year: int + @data_provider( + { + "respect_default_values_1": ( + """ + class B: + def foo(self, x: int = a.b.A.__add__(1), y=None) -> int: ... """, """ - def foo() -> None: - pass + class B: + def foo(self, x = A + 1, y = None) -> int: + return x + """, """ - from mypy_extensions import TypedDict - - class MovieTypedDict(TypedDict): - name: str - year: int - - def foo() -> None: - pass + class B: + def foo(self, x: int = A + 1, y = None) -> int: + return x """, ), - # Insert only the TypedDict class that is not in the source file. - ( + "respect_default_values_2": ( """ - from mypy_extensions import TypedDict + from typing import Optional - class MovieTypedDict(TypedDict): - name: str - year: int - - class ExistingMovieTypedDict(TypedDict): - name: str - year: int + class A: + def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... """, """ - from mypy_extensions import TypedDict - - class ExistingMovieTypedDict(TypedDict): - name: str - year: int - - def foo() -> None: - pass + class A: + def foo(self, atticus, b = None, c = False): ... 
""", """ - from mypy_extensions import TypedDict + from typing import Optional - class MovieTypedDict(TypedDict): - name: str - year: int - - class ExistingMovieTypedDict(TypedDict): - name: str - year: int - - def foo() -> None: - pass + class A: + def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... """, ), - ( + } + ) + def test_annotate_classes(self, stub: str, before: str, after: str) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) + + @data_provider( + { + "method_and_function_of_same_name": ( + """ + def foo() -> int: ... + + class A: + def foo() -> str: ... + """, + """ + def foo(): + return 1 + class A: + def foo(): + return '' + """, + """ + def foo() -> int: + return 1 + class A: + def foo() -> str: + return '' + """, + ), + "global_and_attribute_of_same_name": ( + """ + bar: int = ... + class A: + bar: str = ... + """, + """ + bar = foo() + class A: + bar = foobar() + """, + """ + bar: int = foo() + class A: + bar: str = foobar() + """, + ), + "add_global_annotation_simple_case": ( + """ + a: Dict[str, int] = ... + """, + """ + def foo() -> int: + return 1 + a = {} + a['x'] = foo() + """, + """ + def foo() -> int: + return 1 + a: Dict[str, int] = {} + a['x'] = foo() + """, + ), + "add_global_annotation_with_Type__no_added_import": ( """ from typing import Dict @@ -736,33 +651,82 @@ class TestApplyAnnotationsVisitor(CodemodTest): example: Dict[str, Type[foo.Example]] = { "test": foo() } """, ), - ( + "tuple_assign__add_new_top_level_declarations": ( """ - from typing import Optional - - class A: - def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... + a: int = ... + b: str = ... """, """ - class A: - def foo(self, atticus, b = None, c = False): ... + def foo() -> Tuple[int, str]: + return (1, "") + + a, b = foo() """, """ - from typing import Optional + a: int + b: str - class A: - def foo(self, atticus, b: Optional[int] = None, c: bool = False): ... + def foo() -> Tuple[int, str]: + return (1, "") + + a, b = foo() """, ), - # Make sure we handle string annotations well - ( + "list_assign__add_new_top_level_declarations": ( """ + a: int = ... + b: str = ... + """, + """ + def foo() -> Tuple[int, str]: + return (1, "") + + [a, b] = foo() + """, + """ + a: int + b: str + + def foo() -> Tuple[int, str]: + return (1, "") + + [a, b] = foo() + """, + ), + "tuples_with_subscripts__add_new_toplevel_declaration": ( + """ + a: int = ... + """, + """ + from typing import Tuple + + def foo() -> Tuple[str, int]: + return "", 1 + + b['z'], a = foo() + """, + """ + from typing import Tuple + a: int + + def foo() -> Tuple[str, int]: + return "", 1 + + b['z'], a = foo() + """, + ), + "handle_quoted_annotations": ( + """ + bar: "a.b.Example" + def f(x: "typing.Union[int, str]") -> "typing.Union[int, str]": ... class A: def f(self: "A") -> "A": ... 
""", """ + bar = Example() + def f(x): return x @@ -771,6 +735,8 @@ class TestApplyAnnotationsVisitor(CodemodTest): return self """, """ + bar: "a.b.Example" = Example() + def f(x: "typing.Union[int, str]") -> "typing.Union[int, str]": return x @@ -779,14 +745,81 @@ class TestApplyAnnotationsVisitor(CodemodTest): return self """, ), - ) + } ) - def test_annotate_functions(self, stub: str, before: str, after: str) -> None: + def test_annotate_mixed(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) @data_provider( - ( - ( + { + "insert_new_TypedDict_class_not_in_source_file": ( + """ + from mypy_extensions import TypedDict + + class MovieTypedDict(TypedDict): + name: str + year: int + """, + """ + def foo() -> None: + pass + """, + """ + from mypy_extensions import TypedDict + + class MovieTypedDict(TypedDict): + name: str + year: int + + def foo() -> None: + pass + """, + ), + "insert_only_TypedDict_class_not_already_in_source": ( + """ + from mypy_extensions import TypedDict + + class MovieTypedDict(TypedDict): + name: str + year: int + + class ExistingMovieTypedDict(TypedDict): + name: str + year: int + """, + """ + from mypy_extensions import TypedDict + + class ExistingMovieTypedDict(TypedDict): + name: str + year: int + + def foo() -> None: + pass + """, + """ + from mypy_extensions import TypedDict + + class MovieTypedDict(TypedDict): + name: str + year: int + + class ExistingMovieTypedDict(TypedDict): + name: str + year: int + + def foo() -> None: + pass + """, + ), + } + ) + def test_adding_typed_dicts(self, stub: str, before: str, after: str) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) + + @data_provider( + { + "required_positional_only_args": ( """ def foo( a: int, /, b: str, c: int = ..., *, d: str = ..., e: int, f: int = ... @@ -805,7 +838,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): return 1 """, ), - ( + "positional_only_arg_with_default_value": ( """ def foo( a: int, b: int = ..., /, c: int = ..., *, d: str = ..., e: int, f: int = ... @@ -824,28 +857,28 @@ class TestApplyAnnotationsVisitor(CodemodTest): return 1 """, ), - ) + } ) @unittest.skipIf(sys.version_info < (3, 8), "Unsupported Python version") def test_annotate_functions_py38(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) @data_provider( - ( - ( + { + "fully_annotated_with_different_stub": ( """ - def fully_annotated_with_different_stub(a: bool, b: bool) -> str: ... + def f(a: bool, b: bool) -> str: ... """, """ - def fully_annotated_with_different_stub(a: int, b: str) -> bool: + def f(a: int, b: str) -> bool: return 'hello' """, """ - def fully_annotated_with_different_stub(a: bool, b: bool) -> str: + def f(a: bool, b: bool) -> str: return 'hello' """, ), - ) + } ) def test_annotate_functions_with_existing_annotations( self, stub: str, before: str, after: str @@ -858,64 +891,47 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) @data_provider( - ( - ( + { + "fully_annotated_with_untyped_stub": ( """ - def fully_annotated_with_untyped_stub(a, b): ... + def f(a, b): ... """, """ - def fully_annotated_with_untyped_stub(a: bool, b: bool) -> str: + def f(a: bool, b: bool) -> str: return "hello" """, """ - def fully_annotated_with_untyped_stub(a: bool, b: bool) -> str: + def f(a: bool, b: bool) -> str: return "hello" """, ), - ( + "params_annotated_with_return_from_stub": ( """ - def params_annotated_with_return_from_stub(a, b) -> str: ... 
+ def f(a, b) -> str: ... """, """ - def params_annotated_with_return_from_stub(a: bool, b: bool): + def f(a: bool, b: bool): return "hello" """, """ - def params_annotated_with_return_from_stub(a: bool, b: bool) -> str: + def f(a: bool, b: bool) -> str: return "hello" """, ), - ( + "partially_annotated_params_with_partial_stub": ( """ - def partially_annotated_params_with_partial_stub(a, b: int): ... + def f(a, b: int): ... """, """ - def partially_annotated_params_with_partial_stub(a: bool, b) -> str: + def f(a: bool, b) -> str: return "hello" """, """ - def partially_annotated_params_with_partial_stub(a: bool, b: int) -> str: + def f(a: bool, b: int) -> str: return "hello" """, ), - ( - """ - def async_with_decorators(a: bool, b: bool) -> str: ... - """, - """ - @second_decorator - @first_decorator(5) - async def async_with_decorators(a, b): - return "hello" - """, - """ - @second_decorator - @first_decorator(5) - async def async_with_decorators(a: bool, b: bool) -> str: - return "hello" - """, - ), - ) + } ) def test_annotate_using_incomplete_stubs( self, stub: str, before: str, after: str From 45c0d96c06a8f20e330e6afd671ff7145a3bab8e Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 7 Sep 2021 11:59:38 +0100 Subject: [PATCH 131/632] master -> main (#521) --- CONTRIBUTING.md | 2 +- README.rst | 10 +++++----- docs/source/conf.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f8eab9f0..ef6fcded 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,7 +9,7 @@ pull requests. ## Pull Requests We actively welcome your pull requests. -1. Fork the repo and create your branch from `master`. +1. Fork the repo and create your branch from `main`. 2. If you've added code that should be tested, add tests. 3. If you've changed APIs, update the documentation. 4. Ensure the test suite passes by `tox test`. diff --git a/README.rst b/README.rst index b603be9c..2ddc245d 100644 --- a/README.rst +++ b/README.rst @@ -11,11 +11,11 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python :alt: Documentation .. |ci-badge| image:: https://github.com/Instagram/LibCST/actions/workflows/build.yml/badge.svg - :target: https://github.com/Instagram/LibCST/actions/workflows/build.yml?query=branch%3Amaster + :target: https://github.com/Instagram/LibCST/actions/workflows/build.yml?query=branch%3Amain :alt: Github Actions -.. |codecov-badge| image:: https://codecov.io/gh/Instagram/LibCST/branch/master/graph/badge.svg - :target: https://codecov.io/gh/Instagram/LibCST/branch/master +.. |codecov-badge| image:: https://codecov.io/gh/Instagram/LibCST/branch/main/graph/badge.svg + :target: https://codecov.io/gh/Instagram/LibCST/branch/main :alt: CodeCov .. |pypi-badge| image:: https://img.shields.io/pypi/v/libcst.svg @@ -28,7 +28,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python .. 
|notebook-badge| image:: https://img.shields.io/badge/notebook-run-579ACA.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFkAAABZCAMAAABi1XidAAAB8lBMVEX///9XmsrmZYH1olJXmsr1olJXmsrmZYH1olJXmsr1olJXmsrmZYH1olL1olJXmsr1olJXmsrmZYH1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olJXmsrmZYH1olL1olL0nFf1olJXmsrmZYH1olJXmsq8dZb1olJXmsrmZYH1olJXmspXmspXmsr1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olLeaIVXmsrmZYH1olL1olL1olJXmsrmZYH1olLna31Xmsr1olJXmsr1olJXmsrmZYH1olLqoVr1olJXmsr1olJXmsrmZYH1olL1olKkfaPobXvviGabgadXmsqThKuofKHmZ4Dobnr1olJXmsr1olJXmspXmsr1olJXmsrfZ4TuhWn1olL1olJXmsqBi7X1olJXmspZmslbmMhbmsdemsVfl8ZgmsNim8Jpk8F0m7R4m7F5nLB6jbh7jbiDirOEibOGnKaMhq+PnaCVg6qWg6qegKaff6WhnpKofKGtnomxeZy3noG6dZi+n3vCcpPDcpPGn3bLb4/Mb47UbIrVa4rYoGjdaIbeaIXhoWHmZYHobXvpcHjqdHXreHLroVrsfG/uhGnuh2bwj2Hxk17yl1vzmljzm1j0nlX1olL3AJXWAAAAbXRSTlMAEBAQHx8gICAuLjAwMDw9PUBAQEpQUFBXV1hgYGBkcHBwcXl8gICAgoiIkJCQlJicnJ2goKCmqK+wsLC4usDAwMjP0NDQ1NbW3Nzg4ODi5+3v8PDw8/T09PX29vb39/f5+fr7+/z8/Pz9/v7+zczCxgAABC5JREFUeAHN1ul3k0UUBvCb1CTVpmpaitAGSLSpSuKCLWpbTKNJFGlcSMAFF63iUmRccNG6gLbuxkXU66JAUef/9LSpmXnyLr3T5AO/rzl5zj137p136BISy44fKJXuGN/d19PUfYeO67Znqtf2KH33Id1psXoFdW30sPZ1sMvs2D060AHqws4FHeJojLZqnw53cmfvg+XR8mC0OEjuxrXEkX5ydeVJLVIlV0e10PXk5k7dYeHu7Cj1j+49uKg7uLU61tGLw1lq27ugQYlclHC4bgv7VQ+TAyj5Zc/UjsPvs1sd5cWryWObtvWT2EPa4rtnWW3JkpjggEpbOsPr7F7EyNewtpBIslA7p43HCsnwooXTEc3UmPmCNn5lrqTJxy6nRmcavGZVt/3Da2pD5NHvsOHJCrdc1G2r3DITpU7yic7w/7Rxnjc0kt5GC4djiv2Sz3Fb2iEZg41/ddsFDoyuYrIkmFehz0HR2thPgQqMyQYb2OtB0WxsZ3BeG3+wpRb1vzl2UYBog8FfGhttFKjtAclnZYrRo9ryG9uG/FZQU4AEg8ZE9LjGMzTmqKXPLnlWVnIlQQTvxJf8ip7VgjZjyVPrjw1te5otM7RmP7xm+sK2Gv9I8Gi++BRbEkR9EBw8zRUcKxwp73xkaLiqQb+kGduJTNHG72zcW9LoJgqQxpP3/Tj//c3yB0tqzaml05/+orHLksVO+95kX7/7qgJvnjlrfr2Ggsyx0eoy9uPzN5SPd86aXggOsEKW2Prz7du3VID3/tzs/sSRs2w7ovVHKtjrX2pd7ZMlTxAYfBAL9jiDwfLkq55Tm7ifhMlTGPyCAs7RFRhn47JnlcB9RM5T97ASuZXIcVNuUDIndpDbdsfrqsOppeXl5Y+XVKdjFCTh+zGaVuj0d9zy05PPK3QzBamxdwtTCrzyg/2Rvf2EstUjordGwa/kx9mSJLr8mLLtCW8HHGJc2R5hS219IiF6PnTusOqcMl57gm0Z8kanKMAQg0qSyuZfn7zItsbGyO9QlnxY0eCuD1XL2ys/MsrQhltE7Ug0uFOzufJFE2PxBo/YAx8XPPdDwWN0MrDRYIZF0mSMKCNHgaIVFoBbNoLJ7tEQDKxGF0kcLQimojCZopv0OkNOyWCCg9XMVAi7ARJzQdM2QUh0gmBozjc3Skg6dSBRqDGYSUOu66Zg+I2fNZs/M3/f/Grl/XnyF1Gw3VKCez0PN5IUfFLqvgUN4C0qNqYs5YhPL+aVZYDE4IpUk57oSFnJm4FyCqqOE0jhY2SMyLFoo56zyo6becOS5UVDdj7Vih0zp+tcMhwRpBeLyqtIjlJKAIZSbI8SGSF3k0pA3mR5tHuwPFoa7N7reoq2bqCsAk1HqCu5uvI1n6JuRXI+S1Mco54YmYTwcn6Aeic+kssXi8XpXC4V3t7/ADuTNKaQJdScAAAAAElFTkSuQmCC - :target: https://mybinder.org/v2/gh/Instagram/LibCST/master?filepath=docs%2Fsource%2Ftutorial.ipynb + :target: https://mybinder.org/v2/gh/Instagram/LibCST/main?filepath=docs%2Fsource%2Ftutorial.ipynb :alt: Notebook .. intro-start @@ -52,7 +52,7 @@ You can learn more about `the value that LibCST provides motivations for the project `__ in `our documentation `__. -Try it out with `notebook examples `__. +Try it out with `notebook examples `__. Example expression:: diff --git a/docs/source/conf.py b/docs/source/conf.py index 3eaff6a0..22ca3e8b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -227,6 +227,6 @@ nbsphinx_prolog = r""" Interactive online tutorial: |notebook-badge| .. 
|notebook-badge| image:: https://img.shields.io/badge/notebook-run-579ACA.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFkAAABZCAMAAABi1XidAAAB8lBMVEX///9XmsrmZYH1olJXmsr1olJXmsrmZYH1olJXmsr1olJXmsrmZYH1olL1olJXmsr1olJXmsrmZYH1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olJXmsrmZYH1olL1olL0nFf1olJXmsrmZYH1olJXmsq8dZb1olJXmsrmZYH1olJXmspXmspXmsr1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olLeaIVXmsrmZYH1olL1olL1olJXmsrmZYH1olLna31Xmsr1olJXmsr1olJXmsrmZYH1olLqoVr1olJXmsr1olJXmsrmZYH1olL1olKkfaPobXvviGabgadXmsqThKuofKHmZ4Dobnr1olJXmsr1olJXmspXmsr1olJXmsrfZ4TuhWn1olL1olJXmsqBi7X1olJXmspZmslbmMhbmsdemsVfl8ZgmsNim8Jpk8F0m7R4m7F5nLB6jbh7jbiDirOEibOGnKaMhq+PnaCVg6qWg6qegKaff6WhnpKofKGtnomxeZy3noG6dZi+n3vCcpPDcpPGn3bLb4/Mb47UbIrVa4rYoGjdaIbeaIXhoWHmZYHobXvpcHjqdHXreHLroVrsfG/uhGnuh2bwj2Hxk17yl1vzmljzm1j0nlX1olL3AJXWAAAAbXRSTlMAEBAQHx8gICAuLjAwMDw9PUBAQEpQUFBXV1hgYGBkcHBwcXl8gICAgoiIkJCQlJicnJ2goKCmqK+wsLC4usDAwMjP0NDQ1NbW3Nzg4ODi5+3v8PDw8/T09PX29vb39/f5+fr7+/z8/Pz9/v7+zczCxgAABC5JREFUeAHN1ul3k0UUBvCb1CTVpmpaitAGSLSpSuKCLWpbTKNJFGlcSMAFF63iUmRccNG6gLbuxkXU66JAUef/9LSpmXnyLr3T5AO/rzl5zj137p136BISy44fKJXuGN/d19PUfYeO67Znqtf2KH33Id1psXoFdW30sPZ1sMvs2D060AHqws4FHeJojLZqnw53cmfvg+XR8mC0OEjuxrXEkX5ydeVJLVIlV0e10PXk5k7dYeHu7Cj1j+49uKg7uLU61tGLw1lq27ugQYlclHC4bgv7VQ+TAyj5Zc/UjsPvs1sd5cWryWObtvWT2EPa4rtnWW3JkpjggEpbOsPr7F7EyNewtpBIslA7p43HCsnwooXTEc3UmPmCNn5lrqTJxy6nRmcavGZVt/3Da2pD5NHvsOHJCrdc1G2r3DITpU7yic7w/7Rxnjc0kt5GC4djiv2Sz3Fb2iEZg41/ddsFDoyuYrIkmFehz0HR2thPgQqMyQYb2OtB0WxsZ3BeG3+wpRb1vzl2UYBog8FfGhttFKjtAclnZYrRo9ryG9uG/FZQU4AEg8ZE9LjGMzTmqKXPLnlWVnIlQQTvxJf8ip7VgjZjyVPrjw1te5otM7RmP7xm+sK2Gv9I8Gi++BRbEkR9EBw8zRUcKxwp73xkaLiqQb+kGduJTNHG72zcW9LoJgqQxpP3/Tj//c3yB0tqzaml05/+orHLksVO+95kX7/7qgJvnjlrfr2Ggsyx0eoy9uPzN5SPd86aXggOsEKW2Prz7du3VID3/tzs/sSRs2w7ovVHKtjrX2pd7ZMlTxAYfBAL9jiDwfLkq55Tm7ifhMlTGPyCAs7RFRhn47JnlcB9RM5T97ASuZXIcVNuUDIndpDbdsfrqsOppeXl5Y+XVKdjFCTh+zGaVuj0d9zy05PPK3QzBamxdwtTCrzyg/2Rvf2EstUjordGwa/kx9mSJLr8mLLtCW8HHGJc2R5hS219IiF6PnTusOqcMl57gm0Z8kanKMAQg0qSyuZfn7zItsbGyO9QlnxY0eCuD1XL2ys/MsrQhltE7Ug0uFOzufJFE2PxBo/YAx8XPPdDwWN0MrDRYIZF0mSMKCNHgaIVFoBbNoLJ7tEQDKxGF0kcLQimojCZopv0OkNOyWCCg9XMVAi7ARJzQdM2QUh0gmBozjc3Skg6dSBRqDGYSUOu66Zg+I2fNZs/M3/f/Grl/XnyF1Gw3VKCez0PN5IUfFLqvgUN4C0qNqYs5YhPL+aVZYDE4IpUk57oSFnJm4FyCqqOE0jhY2SMyLFoo56zyo6becOS5UVDdj7Vih0zp+tcMhwRpBeLyqtIjlJKAIZSbI8SGSF3k0pA3mR5tHuwPFoa7N7reoq2bqCsAk1HqCu5uvI1n6JuRXI+S1Mco54YmYTwcn6Aeic+kssXi8XpXC4V3t7/ADuTNKaQJdScAAAAAElFTkSuQmCC - :target: https://mybinder.org/v2/gh/Instagram/LibCST/master?filepath={{ docname }} + :target: https://mybinder.org/v2/gh/Instagram/LibCST/main?filepath={{ docname }} :alt: Notebook """ From 69156c77759c815a975a5f77240c099e214c4914 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Tue, 7 Sep 2021 07:08:46 -0400 Subject: [PATCH 132/632] Bump pyre-check to latest version (#516) The current version of the pyre query api requires a running watchman server, which has to be started independently. 
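A minimal sketch of the expected flow once a server is started by hand (this assumes a `.pyre_configuration` in the directory being analyzed, like the one added for the test suite below, and uses only public `libcst.metadata` APIs; it is illustrative, not part of the patch):

```
import subprocess

from libcst.metadata import FullRepoManager, TypeInferenceProvider

# The pyre query API no longer manages watchman for us, so start
# (and later stop) the server explicitly.
subprocess.run(["pyre", "start", "--no-watchman"], check=True)
try:
    # FullRepoManager gathers type information for the listed paths by
    # querying the running pyre server through TypeInferenceProvider.
    manager = FullRepoManager(".", ["simple_class.py"], {TypeInferenceProvider})
    wrapper = manager.get_metadata_wrapper_for_path("simple_class.py")
finally:
    subprocess.run(["pyre", "stop"], check=True)
```
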
--- libcst/tests/pyre/simple_class.json | 8 ++++---- libcst/tests/test_pyre_integration.py | 2 +- requirements-dev.txt | 2 +- setup.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index dc621ff3..b9424b05 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -37,7 +37,7 @@ "column": 16 } }, - "annotation": "typing.Callable(libcst.tests.pyre.simple_class.Item.__init__)[[Named(self, unknown), Named(n, int)], None]" + "annotation": "typing.Callable(libcst.tests.pyre.simple_class.Item.__init__)[[Named(self, libcst.tests.pyre.simple_class.Item), Named(n, int)], None]" }, { "location": { @@ -167,7 +167,7 @@ "column": 17 } }, - "annotation": "typing.Callable(libcst.tests.pyre.simple_class.ItemCollector.get_items)[[Named(self, unknown), Named(n, int)], typing.Sequence[libcst.tests.pyre.simple_class.Item]]" + "annotation": "typing.Callable(libcst.tests.pyre.simple_class.ItemCollector.get_items)[[Named(self, libcst.tests.pyre.simple_class.ItemCollector), Named(n, int)], typing.Sequence[libcst.tests.pyre.simple_class.Item]]" }, { "location": { @@ -219,7 +219,7 @@ "column": 43 } }, - "annotation": "typing.Callable(typing.GenericMeta.__getitem__)[[typing.Type[Variable[typing._T_co](covariant)]], typing.Type[typing.Sequence[Variable[typing._T_co](covariant)]]]" + "annotation": "BoundMethod[typing.Callable(typing.GenericMeta.__getitem__)[[Named(self, unknown), typing.Type[Variable[typing._T_co](covariant)]], typing.Type[typing.Sequence[Variable[typing._T_co](covariant)]]], typing.Type[typing.Sequence]]" }, { "location": { @@ -427,7 +427,7 @@ "column": 43 } }, - "annotation": "typing.Callable(libcst.tests.pyre.simple_class.ItemCollector.get_items)[[Named(n, int)], typing.Sequence[libcst.tests.pyre.simple_class.Item]]" + "annotation": "BoundMethod[typing.Callable(libcst.tests.pyre.simple_class.ItemCollector.get_items)[[Named(self, libcst.tests.pyre.simple_class.ItemCollector), Named(n, int)], typing.Sequence[libcst.tests.pyre.simple_class.Item]], libcst.tests.pyre.simple_class.ItemCollector]" }, { "location": { diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 98675787..98300bce 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -122,7 +122,7 @@ if __name__ == "__main__": stdout: str stderr: str return_code: int - stdout, stderr, return_code = run_command(["pyre", "start"]) + stdout, stderr, return_code = run_command(["pyre", "start", "--no-watchman"]) if return_code != 0: print(stdout) print(stderr) diff --git a/requirements-dev.txt b/requirements-dev.txt index 5efb0607..bc7688bf 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,7 +8,7 @@ hypothesmith>=0.0.4 jupyter>=1.0.0 nbsphinx>=0.4.2 prompt-toolkit>=2.0.9 -pyre-check==0.0.41 +pyre-check==0.9.3 setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 tox>=3.18.1 diff --git a/setup.py b/setup.py index 28a404ac..f083f31b 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ import setuptools # Grab the readme so that our package stays in sync with github. 
this_directory: str = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.rst"), encoding="utf-8") as f: - long_description = f.read() + long_description: str = f.read() setuptools.setup( use_scm_version={ From fe0dc1b8a411970526808f25a4d231c0ea60c668 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Tue, 7 Sep 2021 07:23:37 -0400 Subject: [PATCH 133/632] Improve the tests of annotating async / decorator functions (#518) The existing two tests didn't make it clear what exactly we wanted to verify, which is two things: - that we can successfully annotate async functions with decorators - that it doesn't matter whether or not the async and decorator information is part of the stubs - we need it to be permissible because a "real" stubs file would have this, but stubs generated by tools like pyre infer shouldn't need to care, they only really need to care about types --- .../tests/test_apply_type_annotations.py | 39 +++++++++++-------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index ebbc827a..4a93b082 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -428,40 +428,47 @@ class TestApplyAnnotationsVisitor(CodemodTest): return A """, ), - "with_async": ( + # The following two tests verify that we can annotate functions + # with async and decorator information, regardless of whether this + # is part of the stub file. + "async_with_decorators__full_stub": ( """ - async def a(r: Request, z=None) -> django.http.response.HttpResponse: ... - async def b(r: Request, z=None) -> django.http.response.HttpResponse: ... - async def c(r: Request, z=None) -> django.http.response.HttpResponse: ... + @second_decorator + @first_decorator(5) + async def async_with_decorators(r: Request, b: bool) -> django.http.response.HttpResponse: ... """, """ - async def a(r: Request, z=None): ... - async def b(r: Request, z=None): ... - async def c(r: Request, z=None): ... + @second_decorator + @first_decorator(5) + async def async_with_decorators(r, b): + return respond(r, b) """, """ from django.http.response import HttpResponse - async def a(r: Request, z=None) -> HttpResponse: ... - async def b(r: Request, z=None) -> HttpResponse: ... - async def c(r: Request, z=None) -> HttpResponse: ... + @second_decorator + @first_decorator(5) + async def async_with_decorators(r: Request, b: bool) -> HttpResponse: + return respond(r, b) """, ), - "async_with_decorators": ( + "async_with_decorators__bare_stub": ( """ - def async_with_decorators(a: bool, b: bool) -> str: ... + def async_with_decorators(r: Request, b: bool) -> django.http.response.HttpResponse: ... 
""", """ @second_decorator @first_decorator(5) - async def async_with_decorators(a, b): - return "hello" + async def async_with_decorators(r, b): + return respond(r, b) """, """ + from django.http.response import HttpResponse + @second_decorator @first_decorator(5) - async def async_with_decorators(a: bool, b: bool) -> str: - return "hello" + async def async_with_decorators(r: Request, b: bool) -> HttpResponse: + return respond(r, b) """, ), # test cases named with the REQUIRES_PREEXISTING prefix are verifying From 683731b1e1bbd86f7787d48a2c1aba2daf0f2e61 Mon Sep 17 00:00:00 2001 From: Rodrigo Zhou Date: Thu, 16 Sep 2021 09:59:24 -0700 Subject: [PATCH 134/632] Fix pyre command for type inference provider (#523) * Fix pyre command for type inference provider * fix integration test Co-authored-by: Zsolt Dollenstein --- .../tests/test_type_inference_provider.py | 43 ++++++++++++++--- libcst/metadata/type_inference_provider.py | 2 +- libcst/tests/pyre/.pyre_configuration | 7 +++ libcst/tests/pyre/simple_class.json | 46 +++++++++---------- libcst/tests/test_pyre_integration.py | 2 + 5 files changed, 69 insertions(+), 31 deletions(-) create mode 100644 libcst/tests/pyre/.pyre_configuration diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index e7cad72a..164367b9 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -5,7 +5,11 @@ import json +import os +import subprocess +import sys from pathlib import Path +from unittest import skipIf import libcst as cst from libcst import MetadataWrapper @@ -35,24 +39,49 @@ def _test_simple_class_helper(test: UnitTest, wrapper: MetadataWrapper) -> None: test.assertEqual(types[value], "int") # self - test.assertEqual( - types[self_number_attr.value], "libcst.tests.pyre.simple_class.Item" - ) + test.assertEqual(types[self_number_attr.value], "simple_class.Item") collector_assign = cst.ensure_type( cst.ensure_type(m.body[3], cst.SimpleStatementLine).body[0], cst.Assign ) collector = collector_assign.targets[0].target - test.assertEqual(types[collector], "libcst.tests.pyre.simple_class.ItemCollector") + test.assertEqual(types[collector], "simple_class.ItemCollector") items_assign = cst.ensure_type( cst.ensure_type(m.body[4], cst.SimpleStatementLine).body[0], cst.AnnAssign ) items = items_assign.target - test.assertEqual( - types[items], "typing.Sequence[libcst.tests.pyre.simple_class.Item]" - ) + test.assertEqual(types[items], "typing.Sequence[simple_class.Item]") +@skipIf( + sys.version_info < (3, 7), "TypeInferenceProvider doesn't support 3.6 and below" +) +@skipIf(sys.platform == "win32", "TypeInferenceProvider doesn't support windows") class TypeInferenceProviderTest(UnitTest): + @classmethod + def setUpClass(cls): + os.chdir(TEST_SUITE_PATH) + try: + subprocess.run(["pyre", "-n", "start", "--no-watchman"]) + except subprocess.TimeoutExpired as exc: + raise exc + + @classmethod + def tearDownClass(cls): + try: + subprocess.run(["pyre", "-n", "stop"], cwd=TEST_SUITE_PATH) + except subprocess.TimeoutExpired as exc: + raise exc + + @data_provider( + ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) + ) + def test_gen_cache(self, source_path: Path, data_path: Path) -> None: + cache = TypeInferenceProvider.gen_cache( + root_path=source_path.parent, paths=[source_path.name], timeout=None + ) + data: PyreData = json.loads(data_path.read_text()) + self.assertEqual(cache[source_path.name], 
data) + @data_provider( ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) ) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 4924738e..3ce4d02c 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -57,7 +57,7 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): root_path: Path, paths: List[str], timeout: Optional[int] ) -> Mapping[str, object]: params = ",".join(f"path='{root_path / path}'" for path in paths) - cmd_args = ["pyre", "--noninteractive", "query", f'"types({params})"'] + cmd_args = ["pyre", "--noninteractive", "query", f"types({params})"] try: stdout, stderr, return_code = run_command(cmd_args, timeout=timeout) except subprocess.TimeoutExpired as exc: diff --git a/libcst/tests/pyre/.pyre_configuration b/libcst/tests/pyre/.pyre_configuration new file mode 100644 index 00000000..e6deea0f --- /dev/null +++ b/libcst/tests/pyre/.pyre_configuration @@ -0,0 +1,7 @@ +{ + "source_directories": [ + "." + ], + "search_path": [], + "workers": 1 +} diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index b9424b05..21b48333 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -24,7 +24,7 @@ "column": 10 } }, - "annotation": "typing.Type[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.Type[simple_class.Item]" }, { "location": { @@ -37,7 +37,7 @@ "column": 16 } }, - "annotation": "typing.Callable(libcst.tests.pyre.simple_class.Item.__init__)[[Named(self, libcst.tests.pyre.simple_class.Item), Named(n, int)], None]" + "annotation": "typing.Callable(simple_class.Item.__init__)[[Named(self, simple_class.Item), Named(n, int)], None]" }, { "location": { @@ -50,7 +50,7 @@ "column": 21 } }, - "annotation": "libcst.tests.pyre.simple_class.Item" + "annotation": "simple_class.Item" }, { "location": { @@ -102,7 +102,7 @@ "column": 12 } }, - "annotation": "libcst.tests.pyre.simple_class.Item" + "annotation": "simple_class.Item" }, { "location": { @@ -154,7 +154,7 @@ "column": 19 } }, - "annotation": "typing.Type[libcst.tests.pyre.simple_class.ItemCollector]" + "annotation": "typing.Type[simple_class.ItemCollector]" }, { "location": { @@ -167,7 +167,7 @@ "column": 17 } }, - "annotation": "typing.Callable(libcst.tests.pyre.simple_class.ItemCollector.get_items)[[Named(self, libcst.tests.pyre.simple_class.ItemCollector), Named(n, int)], typing.Sequence[libcst.tests.pyre.simple_class.Item]]" + "annotation": "typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]]" }, { "location": { @@ -180,7 +180,7 @@ "column": 22 } }, - "annotation": "libcst.tests.pyre.simple_class.ItemCollector" + "annotation": "simple_class.ItemCollector" }, { "location": { @@ -232,7 +232,7 @@ "column": 49 } }, - "annotation": "typing.Type[typing.Sequence[libcst.tests.pyre.simple_class.Item]]" + "annotation": "typing.Type[typing.Sequence[simple_class.Item]]" }, { "location": { @@ -245,7 +245,7 @@ "column": 48 } }, - "annotation": "typing.Type[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.Type[simple_class.Item]" }, { "location": { @@ -258,7 +258,7 @@ "column": 42 } }, - "annotation": "typing.List[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.List[simple_class.Item]" }, { "location": { @@ -271,7 +271,7 @@ "column": 20 } }, - "annotation": 
"typing.Type[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.Type[simple_class.Item]" }, { "location": { @@ -284,7 +284,7 @@ "column": 23 } }, - "annotation": "libcst.tests.pyre.simple_class.Item" + "annotation": "simple_class.Item" }, { "location": { @@ -349,7 +349,7 @@ "column": 9 } }, - "annotation": "libcst.tests.pyre.simple_class.ItemCollector" + "annotation": "simple_class.ItemCollector" }, { "location": { @@ -362,7 +362,7 @@ "column": 25 } }, - "annotation": "typing.Type[libcst.tests.pyre.simple_class.ItemCollector]" + "annotation": "typing.Type[simple_class.ItemCollector]" }, { "location": { @@ -375,7 +375,7 @@ "column": 27 } }, - "annotation": "libcst.tests.pyre.simple_class.ItemCollector" + "annotation": "simple_class.ItemCollector" }, { "location": { @@ -388,7 +388,7 @@ "column": 5 } }, - "annotation": "typing.Sequence[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.Sequence[simple_class.Item]" }, { "location": { @@ -401,7 +401,7 @@ "column": 21 } }, - "annotation": "typing.Type[typing.Sequence[libcst.tests.pyre.simple_class.Item]]" + "annotation": "typing.Type[typing.Sequence[simple_class.Item]]" }, { "location": { @@ -414,7 +414,7 @@ "column": 33 } }, - "annotation": "libcst.tests.pyre.simple_class.ItemCollector" + "annotation": "simple_class.ItemCollector" }, { "location": { @@ -427,7 +427,7 @@ "column": 43 } }, - "annotation": "BoundMethod[typing.Callable(libcst.tests.pyre.simple_class.ItemCollector.get_items)[[Named(self, libcst.tests.pyre.simple_class.ItemCollector), Named(n, int)], typing.Sequence[libcst.tests.pyre.simple_class.Item]], libcst.tests.pyre.simple_class.ItemCollector]" + "annotation": "BoundMethod[typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]], simple_class.ItemCollector]" }, { "location": { @@ -440,7 +440,7 @@ "column": 46 } }, - "annotation": "typing.Sequence[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.Sequence[simple_class.Item]" }, { "location": { @@ -466,7 +466,7 @@ "column": 8 } }, - "annotation": "libcst.tests.pyre.simple_class.Item" + "annotation": "simple_class.Item" }, { "location": { @@ -479,7 +479,7 @@ "column": 17 } }, - "annotation": "typing.Sequence[libcst.tests.pyre.simple_class.Item]" + "annotation": "typing.Sequence[simple_class.Item]" }, { "location": { @@ -492,7 +492,7 @@ "column": 8 } }, - "annotation": "libcst.tests.pyre.simple_class.Item" + "annotation": "simple_class.Item" }, { "location": { diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 98300bce..e42e08e2 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -5,6 +5,7 @@ import json +import os from pathlib import Path from typing import Dict, List, Mapping, Optional, Tuple, Union @@ -122,6 +123,7 @@ if __name__ == "__main__": stdout: str stderr: str return_code: int + os.chdir(TEST_SUITE_PATH) stdout, stderr, return_code = run_command(["pyre", "start", "--no-watchman"]) if return_code != 0: print(stdout) From a20d43e7e64ee006d5d23deafe0a6e25db904c22 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 21 Sep 2021 04:53:17 -0400 Subject: [PATCH 135/632] Rewrite the MatchIfTrue type to be generic on _MatchIfTrueT (#512) --- libcst/codegen/gen_matcher_classes.py | 36 +- libcst/matchers/__init__.py | 1275 +++++++++++-------------- libcst/matchers/_matcher_base.py | 70 +- libcst/matchers/_visitors.py | 12 +- 4 files changed, 581 insertions(+), 812 
deletions(-) diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index b3475570..59fb251e 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -121,32 +121,14 @@ def _get_match_if_true(oldtype: cst.BaseExpression) -> cst.SubscriptElement: slice=( cst.SubscriptElement( cst.Index( - cst.Subscript( - cst.Name("Callable"), - slice=( - cst.SubscriptElement( - cst.Index( - cst.List( - [ - cst.Element( - # MatchIfTrue takes in the original node type, - # and returns a boolean. So, lets convert our - # quoted classes (forward refs to other - # matchers) back to the CSTNode they refer to. - # We can do this because there's always a 1:1 - # name mapping. - _convert_match_nodes_to_cst_nodes( - oldtype - ) - ) - ] - ) - ) - ), - cst.SubscriptElement(cst.Index(cst.Name("bool"))), - ), - ) - ) + # MatchIfTrue takes in the original node type, + # and returns a boolean. So, lets convert our + # quoted classes (forward refs to other + # matchers) back to the CSTNode they refer to. + # We can do this because there's always a 1:1 + # name mapping. + _convert_match_nodes_to_cst_nodes(oldtype) + ), ), ), ) @@ -459,7 +441,7 @@ generated_code.append("") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from dataclasses import dataclass") -generated_code.append("from typing import Callable, Optional, Sequence, Union") +generated_code.append("from typing import Optional, Sequence, Union") generated_code.append("from typing_extensions import Literal") generated_code.append("import libcst as cst") generated_code.append("") diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 907c6ff3..d00e8f42 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -6,7 +6,7 @@ # This file was generated by libcst.codegen.gen_matcher_classes from dataclasses import dataclass -from typing import Callable, Optional, Sequence, Union +from typing import Optional, Sequence, Union from typing_extensions import Literal @@ -153,7 +153,7 @@ MetadataMatchType = Union[MatchMetadata, MatchMetadataIfTrue] BaseParenthesizableWhitespaceMatchType = Union[ "BaseParenthesizableWhitespace", MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseParenthesizableWhitespace], bool]], + MatchIfTrue[cst.BaseParenthesizableWhitespace], ] @@ -226,17 +226,15 @@ class And(BaseBooleanOp, BaseMatcherNode): BaseAssignTargetExpressionMatchType = Union[ "BaseAssignTargetExpression", MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseAssignTargetExpression], bool]], + MatchIfTrue[cst.BaseAssignTargetExpression], ] AnnotationMatchType = Union[ - "Annotation", MetadataMatchType, MatchIfTrue[Callable[[cst.Annotation], bool]] + "Annotation", MetadataMatchType, MatchIfTrue[cst.Annotation] ] AssignEqualMatchType = Union[ - "AssignEqual", MetadataMatchType, MatchIfTrue[Callable[[cst.AssignEqual], bool]] -] -SemicolonMatchType = Union[ - "Semicolon", MetadataMatchType, MatchIfTrue[Callable[[cst.Semicolon], bool]] + "AssignEqual", MetadataMatchType, MatchIfTrue[cst.AssignEqual] ] +SemicolonMatchType = Union["Semicolon", MetadataMatchType, MatchIfTrue[cst.Semicolon]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -256,20 +254,20 @@ class AnnAssign(BaseSmallStatement, BaseMatcherNode): value: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], 
DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() @@ -294,9 +292,7 @@ class AnnAssign(BaseSmallStatement, BaseMatcherNode): BaseExpressionMatchType = Union[ - "BaseExpression", - MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseExpression], bool]], + "BaseExpression", MetadataMatchType, MatchIfTrue[cst.BaseExpression] ] @@ -328,9 +324,7 @@ class Annotation(BaseMatcherNode): ] = DoNotCare() -CommaMatchType = Union[ - "Comma", MetadataMatchType, MatchIfTrue[Callable[[cst.Comma], bool]] -] +CommaMatchType = Union["Comma", MetadataMatchType, MatchIfTrue[cst.Comma]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -344,21 +338,13 @@ class Arg(BaseMatcherNode): keyword: Union[ Optional["Name"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Name]], bool]], + MatchIfTrue[Optional[cst.Name]], DoNotCareSentinel, OneOf[ - Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Name]], bool]], - ] + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] ], AllOf[ - Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Name]], bool]], - ] + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] ], ] = DoNotCare() equal: Union[ @@ -373,20 +359,20 @@ class Arg(BaseMatcherNode): star: Union[ Literal["", "*", "**"], MetadataMatchType, - MatchIfTrue[Callable[[Literal["", "*", "**"]], bool]], + MatchIfTrue[Literal["", "*", "**"]], DoNotCareSentinel, OneOf[ Union[ Literal["", "*", "**"], MetadataMatchType, - MatchIfTrue[Callable[[Literal["", "*", "**"]], bool]], + MatchIfTrue[Literal["", "*", "**"]], ] ], AllOf[ Union[ Literal["", "*", "**"], MetadataMatchType, - MatchIfTrue[Callable[[Literal["", "*", "**"]], bool]], + MatchIfTrue[Literal["", "*", "**"]], ] ], ] = DoNotCare() @@ -415,7 +401,7 @@ NameOrTupleOrListMatchType = Union[ "Tuple", "List", MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Name, cst.Tuple, cst.List]], bool]], + MatchIfTrue[Union[cst.Name, cst.Tuple, cst.List]], ] @@ -448,9 +434,7 @@ class AsName(BaseMatcherNode): SimpleWhitespaceMatchType = Union[ - "SimpleWhitespace", - MetadataMatchType, - MatchIfTrue[Callable[[cst.SimpleWhitespace], bool]], + "SimpleWhitespace", MetadataMatchType, MatchIfTrue[cst.SimpleWhitespace] ] @@ -465,20 +449,20 @@ class Assert(BaseSmallStatement, BaseMatcherNode): msg: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() @@ -506,7 +490,7 @@ class Assert(BaseSmallStatement, BaseMatcherNode): AssignTargetMatchType = Union[ - "AssignTarget", MetadataMatchType, MatchIfTrue[Callable[[cst.AssignTarget], bool]] + "AssignTarget", MetadataMatchType, MatchIfTrue[cst.AssignTarget] ] @@ -538,7 +522,7 @@ class Assign(BaseSmallStatement, 
BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.AssignTarget]], bool]], + MatchIfTrue[Sequence[cst.AssignTarget]], OneOf[ Union[ Sequence[ @@ -562,7 +546,7 @@ class Assign(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.AssignTarget]], bool]], + MatchIfTrue[Sequence[cst.AssignTarget]], ] ], AllOf[ @@ -588,7 +572,7 @@ class Assign(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.AssignTarget]], bool]], + MatchIfTrue[Sequence[cst.AssignTarget]], ] ], ] = DoNotCare() @@ -678,15 +662,11 @@ class Asynchronous(BaseMatcherNode): ] = DoNotCare() -NameMatchType = Union[ - "Name", MetadataMatchType, MatchIfTrue[Callable[[cst.Name], bool]] -] -DotMatchType = Union["Dot", MetadataMatchType, MatchIfTrue[Callable[[cst.Dot], bool]]] -LeftParenMatchType = Union[ - "LeftParen", MetadataMatchType, MatchIfTrue[Callable[[cst.LeftParen], bool]] -] +NameMatchType = Union["Name", MetadataMatchType, MatchIfTrue[cst.Name]] +DotMatchType = Union["Dot", MetadataMatchType, MatchIfTrue[cst.Dot]] +LeftParenMatchType = Union["LeftParen", MetadataMatchType, MatchIfTrue[cst.LeftParen]] RightParenMatchType = Union[ - "RightParen", MetadataMatchType, MatchIfTrue[Callable[[cst.RightParen], bool]] + "RightParen", MetadataMatchType, MatchIfTrue[cst.RightParen] ] @@ -732,7 +712,7 @@ class Attribute( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -756,7 +736,7 @@ class Attribute( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -782,7 +762,7 @@ class Attribute( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -812,7 +792,7 @@ class Attribute( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -836,7 +816,7 @@ class Attribute( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -862,7 +842,7 @@ class Attribute( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -874,9 +854,7 @@ class Attribute( ] = DoNotCare() -BaseAugOpMatchType = Union[ - "BaseAugOp", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseAugOp], bool]] -] +BaseAugOpMatchType = Union["BaseAugOp", MetadataMatchType, MatchIfTrue[cst.BaseAugOp]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -947,7 +925,7 @@ class Await(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -971,7 +949,7 @@ class Await(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -997,7 +975,7 @@ class Await(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -1027,7 +1005,7 @@ class Await(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -1051,7 +1029,7 @@ class Await(BaseExpression, BaseMatcherNode): ], ] ], - 
MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -1077,7 +1055,7 @@ class Await(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -1096,7 +1074,7 @@ class Await(BaseExpression, BaseMatcherNode): BaseBinaryOpMatchType = Union[ - "BaseBinaryOp", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseBinaryOp], bool]] + "BaseBinaryOp", MetadataMatchType, MatchIfTrue[cst.BaseBinaryOp] ] @@ -1146,7 +1124,7 @@ class BinaryOperation(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -1170,7 +1148,7 @@ class BinaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -1196,7 +1174,7 @@ class BinaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -1226,7 +1204,7 @@ class BinaryOperation(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -1250,7 +1228,7 @@ class BinaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -1276,7 +1254,7 @@ class BinaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -1437,7 +1415,7 @@ class BitXorAssign(BaseAugOp, BaseMatcherNode): BaseBooleanOpMatchType = Union[ - "BaseBooleanOp", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseBooleanOp], bool]] + "BaseBooleanOp", MetadataMatchType, MatchIfTrue[cst.BaseBooleanOp] ] @@ -1487,7 +1465,7 @@ class BooleanOperation(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -1511,7 +1489,7 @@ class BooleanOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -1537,7 +1515,7 @@ class BooleanOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -1567,7 +1545,7 @@ class BooleanOperation(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -1591,7 +1569,7 @@ class BooleanOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -1617,7 +1595,7 @@ class BooleanOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -1645,7 +1623,7 @@ class Break(BaseSmallStatement, BaseMatcherNode): ] = DoNotCare() -ArgMatchType = Union["Arg", MetadataMatchType, MatchIfTrue[Callable[[cst.Arg], bool]]] +ArgMatchType = Union["Arg", 
MetadataMatchType, MatchIfTrue[cst.Arg]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -1682,7 +1660,7 @@ class Call(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], OneOf[ Union[ Sequence[ @@ -1702,7 +1680,7 @@ class Call(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], ] ], AllOf[ @@ -1724,7 +1702,7 @@ class Call(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], ] ], ] = DoNotCare() @@ -1754,7 +1732,7 @@ class Call(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -1778,7 +1756,7 @@ class Call(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -1804,7 +1782,7 @@ class Call(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -1834,7 +1812,7 @@ class Call(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -1858,7 +1836,7 @@ class Call(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -1884,7 +1862,7 @@ class Call(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -1908,15 +1886,9 @@ class Call(BaseExpression, BaseMatcherNode): ] = DoNotCare() -BaseSuiteMatchType = Union[ - "BaseSuite", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseSuite], bool]] -] -DecoratorMatchType = Union[ - "Decorator", MetadataMatchType, MatchIfTrue[Callable[[cst.Decorator], bool]] -] -EmptyLineMatchType = Union[ - "EmptyLine", MetadataMatchType, MatchIfTrue[Callable[[cst.EmptyLine], bool]] -] +BaseSuiteMatchType = Union["BaseSuite", MetadataMatchType, MatchIfTrue[cst.BaseSuite]] +DecoratorMatchType = Union["Decorator", MetadataMatchType, MatchIfTrue[cst.Decorator]] +EmptyLineMatchType = Union["EmptyLine", MetadataMatchType, MatchIfTrue[cst.EmptyLine]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -1956,7 +1928,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], OneOf[ Union[ Sequence[ @@ -1976,7 +1948,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], ] ], AllOf[ @@ -1998,7 +1970,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], ] ], ] = DoNotCare() @@ -2028,7 +2000,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], OneOf[ Union[ Sequence[ @@ -2048,7 +2020,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - 
MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], ] ], AllOf[ @@ -2070,7 +2042,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Arg]], bool]], + MatchIfTrue[Sequence[cst.Arg]], ] ], ] = DoNotCare() @@ -2100,7 +2072,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Decorator]], bool]], + MatchIfTrue[Sequence[cst.Decorator]], OneOf[ Union[ Sequence[ @@ -2124,7 +2096,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Decorator]], bool]], + MatchIfTrue[Sequence[cst.Decorator]], ] ], AllOf[ @@ -2150,7 +2122,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Decorator]], bool]], + MatchIfTrue[Sequence[cst.Decorator]], ] ], ] = DoNotCare() @@ -2192,7 +2164,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -2216,7 +2188,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -2242,7 +2214,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -2272,7 +2244,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -2296,7 +2268,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -2322,7 +2294,7 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -2396,7 +2368,7 @@ class Comma(BaseMatcherNode): ] = DoNotCare() -strMatchType = Union[str, MetadataMatchType, MatchIfTrue[Callable[[str], bool]]] +strMatchType = Union[str, MetadataMatchType, MatchIfTrue[str]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -2412,9 +2384,7 @@ class Comment(BaseMatcherNode): ] = DoNotCare() -CompIfMatchType = Union[ - "CompIf", MetadataMatchType, MatchIfTrue[Callable[[cst.CompIf], bool]] -] +CompIfMatchType = Union["CompIf", MetadataMatchType, MatchIfTrue[cst.CompIf]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -2457,7 +2427,7 @@ class CompFor(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.CompIf]], bool]], + MatchIfTrue[Sequence[cst.CompIf]], OneOf[ Union[ Sequence[ @@ -2481,7 +2451,7 @@ class CompFor(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.CompIf]], bool]], + MatchIfTrue[Sequence[cst.CompIf]], ] ], AllOf[ @@ -2507,47 +2477,47 @@ class CompFor(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.CompIf]], bool]], + MatchIfTrue[Sequence[cst.CompIf]], ] ], ] = DoNotCare() inner_for_in: Union[ Optional["CompFor"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.CompFor]], bool]], + 
MatchIfTrue[Optional[cst.CompFor]], DoNotCareSentinel, OneOf[ Union[ Optional["CompFor"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.CompFor]], bool]], + MatchIfTrue[Optional[cst.CompFor]], ] ], AllOf[ Union[ Optional["CompFor"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.CompFor]], bool]], + MatchIfTrue[Optional[cst.CompFor]], ] ], ] = DoNotCare() asynchronous: Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], DoNotCareSentinel, OneOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], AllOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], ] = DoNotCare() @@ -2612,9 +2582,7 @@ class CompIf(BaseMatcherNode): ComparisonTargetMatchType = Union[ - "ComparisonTarget", - MetadataMatchType, - MatchIfTrue[Callable[[cst.ComparisonTarget], bool]], + "ComparisonTarget", MetadataMatchType, MatchIfTrue[cst.ComparisonTarget] ] @@ -2652,7 +2620,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.ComparisonTarget]], bool]], + MatchIfTrue[Sequence[cst.ComparisonTarget]], OneOf[ Union[ Sequence[ @@ -2676,7 +2644,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ComparisonTarget]], bool]], + MatchIfTrue[Sequence[cst.ComparisonTarget]], ] ], AllOf[ @@ -2702,7 +2670,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ComparisonTarget]], bool]], + MatchIfTrue[Sequence[cst.ComparisonTarget]], ] ], ] = DoNotCare() @@ -2732,7 +2700,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -2756,7 +2724,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -2782,7 +2750,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -2812,7 +2780,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -2836,7 +2804,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -2862,7 +2830,7 @@ class Comparison(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -2875,7 +2843,7 @@ class Comparison(BaseExpression, BaseMatcherNode): BaseCompOpMatchType = Union[ - "BaseCompOp", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseCompOp], bool]] + "BaseCompOp", MetadataMatchType, MatchIfTrue[cst.BaseCompOp] ] @@ -2905,18 +2873,14 @@ SimpleStringOrFormattedStringMatchType = Union[ "SimpleString", "FormattedString", MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.SimpleString, cst.FormattedString]], bool]], + MatchIfTrue[Union[cst.SimpleString, 
cst.FormattedString]], ] SimpleStringOrFormattedStringOrConcatenatedStringMatchType = Union[ "SimpleString", "FormattedString", "ConcatenatedString", MetadataMatchType, - MatchIfTrue[ - Callable[ - [Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]], bool - ] - ], + MatchIfTrue[Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]], ] @@ -2960,7 +2924,7 @@ class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -2984,7 +2948,7 @@ class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -3010,7 +2974,7 @@ class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -3040,7 +3004,7 @@ class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -3064,7 +3028,7 @@ class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -3090,7 +3054,7 @@ class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -3129,12 +3093,10 @@ NameOrAttributeOrCallMatchType = Union[ "Attribute", "Call", MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Name, cst.Attribute, cst.Call]], bool]], + MatchIfTrue[Union[cst.Name, cst.Attribute, cst.Call]], ] TrailingWhitespaceMatchType = Union[ - "TrailingWhitespace", - MetadataMatchType, - MatchIfTrue[Callable[[cst.TrailingWhitespace], bool]], + "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] ] @@ -3172,7 +3134,7 @@ class Decorator(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -3196,7 +3158,7 @@ class Decorator(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -3222,7 +3184,7 @@ class Decorator(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -3249,7 +3211,7 @@ class Decorator(BaseMatcherNode): BaseDelTargetExpressionMatchType = Union[ "BaseDelTargetExpression", MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseDelTargetExpression], bool]], + MatchIfTrue[cst.BaseDelTargetExpression], ] @@ -3282,19 +3244,13 @@ class Del(BaseSmallStatement, BaseMatcherNode): BaseDictElementMatchType = Union[ - "BaseDictElement", - MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseDictElement], bool]], + "BaseDictElement", MetadataMatchType, MatchIfTrue[cst.BaseDictElement] ] LeftCurlyBraceMatchType = Union[ - "LeftCurlyBrace", - MetadataMatchType, - MatchIfTrue[Callable[[cst.LeftCurlyBrace], bool]], + "LeftCurlyBrace", MetadataMatchType, MatchIfTrue[cst.LeftCurlyBrace] ] RightCurlyBraceMatchType = Union[ - "RightCurlyBrace", - MetadataMatchType, - 
MatchIfTrue[Callable[[cst.RightCurlyBrace], bool]], + "RightCurlyBrace", MetadataMatchType, MatchIfTrue[cst.RightCurlyBrace] ] @@ -3326,7 +3282,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseDictElement]], bool]], + MatchIfTrue[Sequence[cst.BaseDictElement]], OneOf[ Union[ Sequence[ @@ -3350,7 +3306,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseDictElement]], bool]], + MatchIfTrue[Sequence[cst.BaseDictElement]], ] ], AllOf[ @@ -3376,7 +3332,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseDictElement]], bool]], + MatchIfTrue[Sequence[cst.BaseDictElement]], ] ], ] = DoNotCare() @@ -3418,7 +3374,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -3442,7 +3398,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -3468,7 +3424,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -3498,7 +3454,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -3522,7 +3478,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -3548,7 +3504,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -3560,9 +3516,7 @@ class Dict(BaseDict, BaseExpression, BaseMatcherNode): ] = DoNotCare() -CompForMatchType = Union[ - "CompFor", MetadataMatchType, MatchIfTrue[Callable[[cst.CompFor], bool]] -] +CompForMatchType = Union["CompFor", MetadataMatchType, MatchIfTrue[cst.CompFor]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -3623,7 +3577,7 @@ class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -3647,7 +3601,7 @@ class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -3673,7 +3627,7 @@ class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -3703,7 +3657,7 @@ class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -3727,7 +3681,7 @@ class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -3753,7 +3707,7 @@ class DictComp(BaseComp, BaseDict, BaseExpression, 
BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -3927,7 +3881,7 @@ class Ellipsis(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -3951,7 +3905,7 @@ class Ellipsis(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -3977,7 +3931,7 @@ class Ellipsis(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -4007,7 +3961,7 @@ class Ellipsis(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -4031,7 +3985,7 @@ class Ellipsis(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -4057,7 +4011,7 @@ class Ellipsis(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -4103,7 +4057,7 @@ class Else(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -4127,7 +4081,7 @@ class Else(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -4153,7 +4107,7 @@ class Else(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -4171,10 +4125,8 @@ class Else(BaseMatcherNode): ] = DoNotCare() -boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[Callable[[bool], bool]]] -NewlineMatchType = Union[ - "Newline", MetadataMatchType, MatchIfTrue[Callable[[cst.Newline], bool]] -] +boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[bool]] +NewlineMatchType = Union["Newline", MetadataMatchType, MatchIfTrue[cst.Newline]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -4191,20 +4143,20 @@ class EmptyLine(BaseMatcherNode): comment: Union[ Optional["Comment"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + MatchIfTrue[Optional[cst.Comment]], DoNotCareSentinel, OneOf[ Union[ Optional["Comment"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + MatchIfTrue[Optional[cst.Comment]], ] ], AllOf[ Union[ Optional["Comment"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + MatchIfTrue[Optional[cst.Comment]], ] ], ] = DoNotCare() @@ -4255,40 +4207,36 @@ class ExceptHandler(BaseMatcherNode): type: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() name: Union[ 
Optional["AsName"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + MatchIfTrue[Optional[cst.AsName]], DoNotCareSentinel, OneOf[ Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] ] ], AllOf[ Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] ] ], ] = DoNotCare() @@ -4318,7 +4266,7 @@ class ExceptHandler(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -4342,7 +4290,7 @@ class ExceptHandler(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -4368,7 +4316,7 @@ class ExceptHandler(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -4448,7 +4396,7 @@ class Finally(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -4472,7 +4420,7 @@ class Finally(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -4498,7 +4446,7 @@ class Finally(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -4547,7 +4495,7 @@ class Float(BaseExpression, BaseNumber, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -4571,7 +4519,7 @@ class Float(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -4597,7 +4545,7 @@ class Float(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -4627,7 +4575,7 @@ class Float(BaseExpression, BaseNumber, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -4651,7 +4599,7 @@ class Float(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -4677,7 +4625,7 @@ class Float(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -4756,40 +4704,32 @@ class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): orelse: Union[ Optional["Else"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + MatchIfTrue[Optional[cst.Else]], DoNotCareSentinel, OneOf[ - Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], - ] + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] ], AllOf[ - Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], - ] + Union[Optional["Else"], MetadataMatchType, 
MatchIfTrue[Optional[cst.Else]]] ], ] = DoNotCare() asynchronous: Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], DoNotCareSentinel, OneOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], AllOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], ] = DoNotCare() @@ -4819,7 +4759,7 @@ class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -4843,7 +4783,7 @@ class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -4869,7 +4809,7 @@ class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -4908,7 +4848,7 @@ class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): BaseFormattedStringContentMatchType = Union[ "BaseFormattedStringContent", MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseFormattedStringContent], bool]], + MatchIfTrue[cst.BaseFormattedStringContent], ] @@ -4940,7 +4880,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseFormattedStringContent]], bool]], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], OneOf[ Union[ Sequence[ @@ -4964,7 +4904,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseFormattedStringContent]], bool]], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], ] ], AllOf[ @@ -4990,7 +4930,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseFormattedStringContent]], bool]], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], ] ], ] = DoNotCare() @@ -5000,20 +4940,20 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): end: Union[ Literal['"', "'", '"""', "'''"], MetadataMatchType, - MatchIfTrue[Callable[[Literal['"', "'", '"""', "'''"]], bool]], + MatchIfTrue[Literal['"', "'", '"""', "'''"]], DoNotCareSentinel, OneOf[ Union[ Literal['"', "'", '"""', "'''"], MetadataMatchType, - MatchIfTrue[Callable[[Literal['"', "'", '"""', "'''"]], bool]], + MatchIfTrue[Literal['"', "'", '"""', "'''"]], ] ], AllOf[ Union[ Literal['"', "'", '"""', "'''"], MetadataMatchType, - MatchIfTrue[Callable[[Literal['"', "'", '"""', "'''"]], bool]], + MatchIfTrue[Literal['"', "'", '"""', "'''"]], ] ], ] = DoNotCare() @@ -5043,7 +4983,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -5067,7 +5007,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -5093,7 +5033,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - 
MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -5123,7 +5063,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -5147,7 +5087,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -5173,7 +5113,7 @@ class FormattedString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -5196,46 +5136,28 @@ class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): conversion: Union[ Optional[str], MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], + MatchIfTrue[Optional[str]], DoNotCareSentinel, - OneOf[ - Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], - ] - ], - AllOf[ - Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], - ] - ], + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], ] = DoNotCare() format_spec: Union[ Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, - MatchIfTrue[ - Callable[[Optional[Sequence[cst.BaseFormattedStringContent]]], bool] - ], + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], DoNotCareSentinel, OneOf[ Union[ Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, - MatchIfTrue[ - Callable[[Optional[Sequence[cst.BaseFormattedStringContent]]], bool] - ], + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], ] ], AllOf[ Union[ Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, - MatchIfTrue[ - Callable[[Optional[Sequence[cst.BaseFormattedStringContent]]], bool] - ], + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], ] ], ] = DoNotCare() @@ -5254,20 +5176,20 @@ class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): equal: Union[ Optional["AssignEqual"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AssignEqual]], bool]], + MatchIfTrue[Optional[cst.AssignEqual]], DoNotCareSentinel, OneOf[ Union[ Optional["AssignEqual"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AssignEqual]], bool]], + MatchIfTrue[Optional[cst.AssignEqual]], ] ], AllOf[ Union[ Optional["AssignEqual"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AssignEqual]], bool]], + MatchIfTrue[Optional[cst.AssignEqual]], ] ], ] = DoNotCare() @@ -5321,7 +5243,7 @@ class From(BaseMatcherNode): ParametersMatchType = Union[ - "Parameters", MetadataMatchType, MatchIfTrue[Callable[[cst.Parameters], bool]] + "Parameters", MetadataMatchType, MatchIfTrue[cst.Parameters] ] @@ -5368,7 +5290,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Decorator]], bool]], + MatchIfTrue[Sequence[cst.Decorator]], OneOf[ Union[ Sequence[ @@ -5392,7 +5314,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Decorator]], bool]], + MatchIfTrue[Sequence[cst.Decorator]], ] ], AllOf[ @@ -5418,47 +5340,47 @@ class 
FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Decorator]], bool]], + MatchIfTrue[Sequence[cst.Decorator]], ] ], ] = DoNotCare() returns: Union[ Optional["Annotation"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + MatchIfTrue[Optional[cst.Annotation]], DoNotCareSentinel, OneOf[ Union[ Optional["Annotation"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + MatchIfTrue[Optional[cst.Annotation]], ] ], AllOf[ Union[ Optional["Annotation"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + MatchIfTrue[Optional[cst.Annotation]], ] ], ] = DoNotCare() asynchronous: Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], DoNotCareSentinel, OneOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], AllOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], ] = DoNotCare() @@ -5488,7 +5410,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -5512,7 +5434,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -5538,7 +5460,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -5568,7 +5490,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -5592,7 +5514,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -5618,7 +5540,7 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -5694,7 +5616,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -5718,7 +5640,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -5744,7 +5666,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -5774,7 +5696,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ 
@@ -5798,7 +5720,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -5824,7 +5746,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -5836,9 +5758,7 @@ class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): ] = DoNotCare() -NameItemMatchType = Union[ - "NameItem", MetadataMatchType, MatchIfTrue[Callable[[cst.NameItem], bool]] -] +NameItemMatchType = Union["NameItem", MetadataMatchType, MatchIfTrue[cst.NameItem]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -5869,7 +5789,7 @@ class Global(BaseSmallStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.NameItem]], bool]], + MatchIfTrue[Sequence[cst.NameItem]], OneOf[ Union[ Sequence[ @@ -5893,7 +5813,7 @@ class Global(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.NameItem]], bool]], + MatchIfTrue[Sequence[cst.NameItem]], ] ], AllOf[ @@ -5919,7 +5839,7 @@ class Global(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.NameItem]], bool]], + MatchIfTrue[Sequence[cst.NameItem]], ] ], ] = DoNotCare() @@ -5988,11 +5908,7 @@ class GreaterThanEqual(BaseCompOp, BaseMatcherNode): IfOrElseOrNoneMatchType = Union[ - "If", - "Else", - None, - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.If, cst.Else, None]], bool]], + "If", "Else", None, MetadataMatchType, MatchIfTrue[Union[cst.If, cst.Else, None]] ] @@ -6042,7 +5958,7 @@ class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -6066,7 +5982,7 @@ class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -6092,7 +6008,7 @@ class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -6162,7 +6078,7 @@ class IfExp(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -6186,7 +6102,7 @@ class IfExp(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -6212,7 +6128,7 @@ class IfExp(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -6242,7 +6158,7 @@ class IfExp(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -6266,7 +6182,7 @@ class IfExp(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -6292,7 +6208,7 @@ class IfExp(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] 
], ] = DoNotCare() @@ -6359,7 +6275,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -6383,7 +6299,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -6409,7 +6325,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -6439,7 +6355,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -6463,7 +6379,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -6489,7 +6405,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -6502,7 +6418,7 @@ class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): ImportAliasMatchType = Union[ - "ImportAlias", MetadataMatchType, MatchIfTrue[Callable[[cst.ImportAlias], bool]] + "ImportAlias", MetadataMatchType, MatchIfTrue[cst.ImportAlias] ] @@ -6534,7 +6450,7 @@ class Import(BaseSmallStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], OneOf[ Union[ Sequence[ @@ -6558,7 +6474,7 @@ class Import(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], AllOf[ @@ -6584,7 +6500,7 @@ class Import(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], ] = DoNotCare() @@ -6609,10 +6525,7 @@ class Import(BaseSmallStatement, BaseMatcherNode): AttributeOrNameMatchType = Union[ - "Attribute", - "Name", - MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Attribute, cst.Name]], bool]], + "Attribute", "Name", MetadataMatchType, MatchIfTrue[Union[cst.Attribute, cst.Name]] ] @@ -6627,20 +6540,16 @@ class ImportAlias(BaseMatcherNode): asname: Union[ Optional["AsName"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + MatchIfTrue[Optional[cst.AsName]], DoNotCareSentinel, OneOf[ Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] ] ], AllOf[ Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] ] ], ] = DoNotCare() @@ -6660,7 +6569,7 @@ AttributeOrNameOrNoneMatchType = Union[ "Name", None, MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Attribute, cst.Name, None]], bool]], + MatchIfTrue[Union[cst.Attribute, cst.Name, None]], ] @@ -6699,7 +6608,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], 
OneOf[ Union[ Sequence[ @@ -6723,7 +6632,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], AllOf[ @@ -6749,23 +6658,18 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], ], "ImportStar", MetadataMatchType, MatchIfTrue[ - Callable[ - [ - Union[ - Sequence[cst.ImportAlias], - cst.ImportStar, - OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - ] - ], - bool, + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], ] ], DoNotCareSentinel, @@ -6793,7 +6697,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], OneOf[ Union[ Sequence[ @@ -6817,7 +6721,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], AllOf[ @@ -6843,23 +6747,18 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], ], "ImportStar", MetadataMatchType, MatchIfTrue[ - Callable[ - [ - Union[ - Sequence[cst.ImportAlias], - cst.ImportStar, - OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - ] - ], - bool, + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], ] ], ] @@ -6888,7 +6787,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], OneOf[ Union[ Sequence[ @@ -6912,7 +6811,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], AllOf[ @@ -6938,23 +6837,18 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ImportAlias]], bool]], + MatchIfTrue[Sequence[cst.ImportAlias]], ] ], ], "ImportStar", MetadataMatchType, MatchIfTrue[ - Callable[ - [ - Union[ - Sequence[cst.ImportAlias], - cst.ImportStar, - OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - ] - ], - bool, + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], ] ], ] @@ -6986,7 +6880,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Dot]], bool]], + MatchIfTrue[Sequence[cst.Dot]], OneOf[ Union[ Sequence[ @@ -7006,7 +6900,7 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Dot]], bool]], + MatchIfTrue[Sequence[cst.Dot]], ] ], AllOf[ @@ -7028,47 +6922,47 @@ class ImportFrom(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Dot]], bool]], + MatchIfTrue[Sequence[cst.Dot]], ] ], ] = DoNotCare() lpar: Union[ 
Optional["LeftParen"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.LeftParen]], bool]], + MatchIfTrue[Optional[cst.LeftParen]], DoNotCareSentinel, OneOf[ Union[ Optional["LeftParen"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.LeftParen]], bool]], + MatchIfTrue[Optional[cst.LeftParen]], ] ], AllOf[ Union[ Optional["LeftParen"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.LeftParen]], bool]], + MatchIfTrue[Optional[cst.LeftParen]], ] ], ] = DoNotCare() rpar: Union[ Optional["RightParen"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.RightParen]], bool]], + MatchIfTrue[Optional[cst.RightParen]], DoNotCareSentinel, OneOf[ Union[ Optional["RightParen"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.RightParen]], bool]], + MatchIfTrue[Optional[cst.RightParen]], ] ], AllOf[ Union[ Optional["RightParen"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.RightParen]], bool]], + MatchIfTrue[Optional[cst.RightParen]], ] ], ] = DoNotCare() @@ -7137,7 +7031,7 @@ class In(BaseCompOp, BaseMatcherNode): BaseStatementMatchType = Union[ - "BaseStatement", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseStatement], bool]] + "BaseStatement", MetadataMatchType, MatchIfTrue[cst.BaseStatement] ] @@ -7169,7 +7063,7 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseStatement]], OneOf[ Union[ Sequence[ @@ -7193,7 +7087,7 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseStatement]], ] ], AllOf[ @@ -7219,7 +7113,7 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseStatement]], ] ], ] = DoNotCare() @@ -7232,22 +7126,10 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): indent: Union[ Optional[str], MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], + MatchIfTrue[Optional[str]], DoNotCareSentinel, - OneOf[ - Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], - ] - ], - AllOf[ - Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], - ] - ], + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], ] = DoNotCare() footer: Union[ Sequence[ @@ -7275,7 +7157,7 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -7299,7 +7181,7 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -7325,7 +7207,7 @@ class IndentedBlock(BaseSuite, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -7384,7 +7266,7 @@ class Integer(BaseExpression, BaseNumber, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -7408,7 +7290,7 @@ class Integer(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], 
AllOf[ @@ -7434,7 +7316,7 @@ class Integer(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -7464,7 +7346,7 @@ class Integer(BaseExpression, BaseNumber, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -7488,7 +7370,7 @@ class Integer(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -7514,7 +7396,7 @@ class Integer(BaseExpression, BaseNumber, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -7576,9 +7458,7 @@ class IsNot(BaseCompOp, BaseMatcherNode): ] = DoNotCare() -ColonMatchType = Union[ - "Colon", MetadataMatchType, MatchIfTrue[Callable[[cst.Colon], bool]] -] +ColonMatchType = Union["Colon", MetadataMatchType, MatchIfTrue[cst.Colon]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -7624,7 +7504,7 @@ class Lambda(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -7648,7 +7528,7 @@ class Lambda(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -7674,7 +7554,7 @@ class Lambda(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -7704,7 +7584,7 @@ class Lambda(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -7728,7 +7608,7 @@ class Lambda(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -7754,7 +7634,7 @@ class Lambda(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -7909,17 +7789,13 @@ class LessThanEqual(BaseCompOp, BaseMatcherNode): BaseElementMatchType = Union[ - "BaseElement", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseElement], bool]] + "BaseElement", MetadataMatchType, MatchIfTrue[cst.BaseElement] ] LeftSquareBracketMatchType = Union[ - "LeftSquareBracket", - MetadataMatchType, - MatchIfTrue[Callable[[cst.LeftSquareBracket], bool]], + "LeftSquareBracket", MetadataMatchType, MatchIfTrue[cst.LeftSquareBracket] ] RightSquareBracketMatchType = Union[ - "RightSquareBracket", - MetadataMatchType, - MatchIfTrue[Callable[[cst.RightSquareBracket], bool]], + "RightSquareBracket", MetadataMatchType, MatchIfTrue[cst.RightSquareBracket] ] @@ -7957,7 +7833,7 @@ class List( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], OneOf[ Union[ Sequence[ @@ -7981,7 +7857,7 @@ class List( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], ] ], AllOf[ @@ -8007,7 +7883,7 @@ class List( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], ] ], ] = 
DoNotCare() @@ -8049,7 +7925,7 @@ class List( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -8073,7 +7949,7 @@ class List( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -8099,7 +7975,7 @@ class List( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -8129,7 +8005,7 @@ class List( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -8153,7 +8029,7 @@ class List( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -8179,7 +8055,7 @@ class List( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -8243,7 +8119,7 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -8267,7 +8143,7 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -8293,7 +8169,7 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -8323,7 +8199,7 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -8347,7 +8223,7 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -8373,7 +8249,7 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -8449,9 +8325,7 @@ SimpleStatementLineOrBaseCompoundStatementMatchType = Union[ "SimpleStatementLine", "BaseCompoundStatement", MetadataMatchType, - MatchIfTrue[ - Callable[[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], bool] - ], + MatchIfTrue[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], ] @@ -8484,26 +8358,13 @@ class Module(BaseMatcherNode): ], DoNotCareSentinel, MatchIfTrue[ - Callable[ - [ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - ] - ] - ], - bool, + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], + AllOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], + ] ] ], OneOf[ @@ -8538,28 +8399,21 @@ class Module(BaseMatcherNode): ] ], MatchIfTrue[ - Callable[ - [ - Sequence[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[ Union[ - cst.SimpleStatementLine, - 
cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - ] - ], + cst.SimpleStatementLine, cst.BaseCompoundStatement ] - ] - ], - bool, + ], + AllOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + ] ] ], ] @@ -8596,28 +8450,21 @@ class Module(BaseMatcherNode): ] ], MatchIfTrue[ - Callable[ - [ - Sequence[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[ Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - ] - ], + cst.SimpleStatementLine, cst.BaseCompoundStatement ] - ] - ], - bool, + ], + AllOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + ] ] ], ] @@ -8649,7 +8496,7 @@ class Module(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -8673,7 +8520,7 @@ class Module(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -8699,7 +8546,7 @@ class Module(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -8729,7 +8576,7 @@ class Module(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -8753,7 +8600,7 @@ class Module(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -8779,7 +8626,7 @@ class Module(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -8924,7 +8771,7 @@ class Name( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -8948,7 +8795,7 @@ class Name( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -8974,7 +8821,7 @@ class Name( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -9004,7 +8851,7 @@ class Name( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -9028,7 +8875,7 @@ class Name( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -9054,7 +8901,7 @@ class Name( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -9122,7 +8969,7 @@ class NamedExpr(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -9146,7 +8993,7 @@ class NamedExpr(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -9172,7 +9019,7 @@ class NamedExpr(BaseExpression, BaseMatcherNode): ], ] ], - 
MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -9202,7 +9049,7 @@ class NamedExpr(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -9226,7 +9073,7 @@ class NamedExpr(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -9252,7 +9099,7 @@ class NamedExpr(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -9281,22 +9128,10 @@ class Newline(BaseMatcherNode): value: Union[ Optional[str], MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], + MatchIfTrue[Optional[str]], DoNotCareSentinel, - OneOf[ - Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], - ] - ], - AllOf[ - Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[str]], bool]], - ] - ], + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], ] = DoNotCare() metadata: Union[ MetadataMatchType, @@ -9334,7 +9169,7 @@ class Nonlocal(BaseSmallStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.NameItem]], bool]], + MatchIfTrue[Sequence[cst.NameItem]], OneOf[ Union[ Sequence[ @@ -9358,7 +9193,7 @@ class Nonlocal(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.NameItem]], bool]], + MatchIfTrue[Sequence[cst.NameItem]], ] ], AllOf[ @@ -9384,7 +9219,7 @@ class Nonlocal(BaseSmallStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.NameItem]], bool]], + MatchIfTrue[Sequence[cst.NameItem]], ] ], ] = DoNotCare() @@ -9507,20 +9342,20 @@ class Param(BaseMatcherNode): annotation: Union[ Optional["Annotation"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + MatchIfTrue[Optional[cst.Annotation]], DoNotCareSentinel, OneOf[ Union[ Optional["Annotation"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + MatchIfTrue[Optional[cst.Annotation]], ] ], AllOf[ Union[ Optional["Annotation"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Annotation]], bool]], + MatchIfTrue[Optional[cst.Annotation]], ] ], ] = DoNotCare() @@ -9533,20 +9368,20 @@ class Param(BaseMatcherNode): default: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() @@ -9602,17 +9437,15 @@ class ParamStar(BaseMatcherNode): ] = DoNotCare() -ParamMatchType = Union[ - "Param", MetadataMatchType, MatchIfTrue[Callable[[cst.Param], bool]] -] +ParamMatchType = Union["Param", MetadataMatchType, MatchIfTrue[cst.Param]] ParamOrParamStarMatchType = Union[ "Param", "ParamStar", MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.Param, cst.ParamStar]], bool]], + 
MatchIfTrue[Union[cst.Param, cst.ParamStar]], ] ParamSlashMatchType = Union[ - "ParamSlash", MetadataMatchType, MatchIfTrue[Callable[[cst.ParamSlash], bool]] + "ParamSlash", MetadataMatchType, MatchIfTrue[cst.ParamSlash] ] @@ -9644,7 +9477,7 @@ class Parameters(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], OneOf[ Union[ Sequence[ @@ -9668,7 +9501,7 @@ class Parameters(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], ] ], AllOf[ @@ -9694,7 +9527,7 @@ class Parameters(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], ] ], ] = DoNotCare() @@ -9730,7 +9563,7 @@ class Parameters(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], OneOf[ Union[ Sequence[ @@ -9754,7 +9587,7 @@ class Parameters(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], ] ], AllOf[ @@ -9780,27 +9613,23 @@ class Parameters(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], ] ], ] = DoNotCare() star_kwarg: Union[ Optional["Param"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Param]], bool]], + MatchIfTrue[Optional[cst.Param]], DoNotCareSentinel, OneOf[ Union[ - Optional["Param"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Param]], bool]], + Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] ] ], AllOf[ Union[ - Optional["Param"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Param]], bool]], + Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] ] ], ] = DoNotCare() @@ -9830,7 +9659,7 @@ class Parameters(BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], OneOf[ Union[ Sequence[ @@ -9854,7 +9683,7 @@ class Parameters(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], ] ], AllOf[ @@ -9880,7 +9709,7 @@ class Parameters(BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.Param]], bool]], + MatchIfTrue[Sequence[cst.Param]], ] ], ] = DoNotCare() @@ -9932,7 +9761,7 @@ class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -9956,7 +9785,7 @@ class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -9982,7 +9811,7 @@ class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -10084,41 +9913,33 @@ class Raise(BaseSmallStatement, BaseMatcherNode): exc: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ 
Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() cause: Union[ Optional["From"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.From]], bool]], + MatchIfTrue[Optional[cst.From]], DoNotCareSentinel, OneOf[ - Union[ - Optional["From"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.From]], bool]], - ] + Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] ], AllOf[ - Union[ - Optional["From"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.From]], bool]], - ] + Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] ], ] = DoNotCare() whitespace_after_raise: Union[ @@ -10146,20 +9967,20 @@ class Return(BaseSmallStatement, BaseMatcherNode): value: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() @@ -10325,7 +10146,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], OneOf[ Union[ Sequence[ @@ -10349,7 +10170,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], ] ], AllOf[ @@ -10375,7 +10196,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], ] ], ] = DoNotCare() @@ -10417,7 +10238,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -10441,7 +10262,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -10467,7 +10288,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -10497,7 +10318,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -10521,7 +10342,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -10547,7 +10368,7 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -10611,7 +10432,7 @@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -10635,7 +10456,7 
@@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -10661,7 +10482,7 @@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -10691,7 +10512,7 @@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -10715,7 +10536,7 @@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -10741,7 +10562,7 @@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -10754,9 +10575,7 @@ class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode BaseSmallStatementMatchType = Union[ - "BaseSmallStatement", - MetadataMatchType, - MatchIfTrue[Callable[[cst.BaseSmallStatement], bool]], + "BaseSmallStatement", MetadataMatchType, MatchIfTrue[cst.BaseSmallStatement] ] @@ -10788,7 +10607,7 @@ class SimpleStatementLine(BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseSmallStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], OneOf[ Union[ Sequence[ @@ -10812,7 +10631,7 @@ class SimpleStatementLine(BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseSmallStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], ] ], AllOf[ @@ -10838,7 +10657,7 @@ class SimpleStatementLine(BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseSmallStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], ] ], ] = DoNotCare() @@ -10868,7 +10687,7 @@ class SimpleStatementLine(BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -10892,7 +10711,7 @@ class SimpleStatementLine(BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -10918,7 +10737,7 @@ class SimpleStatementLine(BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -10964,7 +10783,7 @@ class SimpleStatementSuite(BaseSuite, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseSmallStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], OneOf[ Union[ Sequence[ @@ -10988,7 +10807,7 @@ class SimpleStatementSuite(BaseSuite, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseSmallStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], ] ], AllOf[ @@ -11014,7 +10833,7 @@ class SimpleStatementSuite(BaseSuite, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseSmallStatement]], bool]], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], ] ], ] = DoNotCare() @@ -11069,7 +10888,7 @@ class SimpleString(BaseExpression, 
BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -11093,7 +10912,7 @@ class SimpleString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -11119,7 +10938,7 @@ class SimpleString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -11149,7 +10968,7 @@ class SimpleString(BaseExpression, BaseString, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -11173,7 +10992,7 @@ class SimpleString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -11199,7 +11018,7 @@ class SimpleString(BaseExpression, BaseString, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -11229,60 +11048,60 @@ class Slice(BaseSlice, BaseMatcherNode): lower: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() upper: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() step: Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], DoNotCareSentinel, OneOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], AllOf[ Union[ Optional["BaseExpression"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.BaseExpression]], bool]], + MatchIfTrue[Optional[cst.BaseExpression]], ] ], ] = DoNotCare() @@ -11362,7 +11181,7 @@ class StarredElement(BaseElement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -11386,7 +11205,7 @@ class StarredElement(BaseElement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -11412,7 +11231,7 @@ class StarredElement(BaseElement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + 
MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -11442,7 +11261,7 @@ class StarredElement(BaseElement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -11466,7 +11285,7 @@ class StarredElement(BaseElement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -11492,7 +11311,7 @@ class StarredElement(BaseElement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -11511,9 +11330,7 @@ class StarredElement(BaseElement, BaseMatcherNode): SubscriptElementMatchType = Union[ - "SubscriptElement", - MetadataMatchType, - MatchIfTrue[Callable[[cst.SubscriptElement], bool]], + "SubscriptElement", MetadataMatchType, MatchIfTrue[cst.SubscriptElement] ] @@ -11553,7 +11370,7 @@ class Subscript( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.SubscriptElement]], bool]], + MatchIfTrue[Sequence[cst.SubscriptElement]], OneOf[ Union[ Sequence[ @@ -11577,7 +11394,7 @@ class Subscript( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.SubscriptElement]], bool]], + MatchIfTrue[Sequence[cst.SubscriptElement]], ] ], AllOf[ @@ -11603,7 +11420,7 @@ class Subscript( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.SubscriptElement]], bool]], + MatchIfTrue[Sequence[cst.SubscriptElement]], ] ], ] = DoNotCare() @@ -11645,7 +11462,7 @@ class Subscript( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -11669,7 +11486,7 @@ class Subscript( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -11695,7 +11512,7 @@ class Subscript( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -11725,7 +11542,7 @@ class Subscript( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -11749,7 +11566,7 @@ class Subscript( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -11775,7 +11592,7 @@ class Subscript( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -11793,9 +11610,7 @@ class Subscript( ] = DoNotCare() -BaseSliceMatchType = Union[ - "BaseSlice", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseSlice], bool]] -] +BaseSliceMatchType = Union["BaseSlice", MetadataMatchType, MatchIfTrue[cst.BaseSlice]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -11872,20 +11687,20 @@ class TrailingWhitespace(BaseMatcherNode): comment: Union[ Optional["Comment"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + MatchIfTrue[Optional[cst.Comment]], DoNotCareSentinel, OneOf[ Union[ Optional["Comment"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + MatchIfTrue[Optional[cst.Comment]], ] ], AllOf[ Union[ Optional["Comment"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Comment]], bool]], + MatchIfTrue[Optional[cst.Comment]], ] ], ] = DoNotCare() @@ -11904,7 +11719,7 @@ class TrailingWhitespace(BaseMatcherNode): ExceptHandlerMatchType = Union[ - "ExceptHandler", 
MetadataMatchType, MatchIfTrue[Callable[[cst.ExceptHandler], bool]] + "ExceptHandler", MetadataMatchType, MatchIfTrue[cst.ExceptHandler] ] @@ -11942,7 +11757,7 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.ExceptHandler]], bool]], + MatchIfTrue[Sequence[cst.ExceptHandler]], OneOf[ Union[ Sequence[ @@ -11966,7 +11781,7 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ExceptHandler]], bool]], + MatchIfTrue[Sequence[cst.ExceptHandler]], ] ], AllOf[ @@ -11992,47 +11807,39 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.ExceptHandler]], bool]], + MatchIfTrue[Sequence[cst.ExceptHandler]], ] ], ] = DoNotCare() orelse: Union[ Optional["Else"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + MatchIfTrue[Optional[cst.Else]], DoNotCareSentinel, OneOf[ - Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], - ] + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] ], AllOf[ - Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], - ] + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] ], ] = DoNotCare() finalbody: Union[ Optional["Finally"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Finally]], bool]], + MatchIfTrue[Optional[cst.Finally]], DoNotCareSentinel, OneOf[ Union[ Optional["Finally"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Finally]], bool]], + MatchIfTrue[Optional[cst.Finally]], ] ], AllOf[ Union[ Optional["Finally"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Finally]], bool]], + MatchIfTrue[Optional[cst.Finally]], ] ], ] = DoNotCare() @@ -12062,7 +11869,7 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -12086,7 +11893,7 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -12112,7 +11919,7 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -12160,7 +11967,7 @@ class Tuple( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], OneOf[ Union[ Sequence[ @@ -12184,7 +11991,7 @@ class Tuple( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], ] ], AllOf[ @@ -12210,7 +12017,7 @@ class Tuple( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.BaseElement]], bool]], + MatchIfTrue[Sequence[cst.BaseElement]], ] ], ] = DoNotCare() @@ -12240,7 +12047,7 @@ class Tuple( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -12264,7 +12071,7 @@ class Tuple( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -12290,7 +12097,7 @@ class Tuple( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = 
DoNotCare() @@ -12320,7 +12127,7 @@ class Tuple( ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -12344,7 +12151,7 @@ class Tuple( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -12370,7 +12177,7 @@ class Tuple( ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -12383,7 +12190,7 @@ class Tuple( BaseUnaryOpMatchType = Union[ - "BaseUnaryOp", MetadataMatchType, MatchIfTrue[Callable[[cst.BaseUnaryOp], bool]] + "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] ] @@ -12427,7 +12234,7 @@ class UnaryOperation(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -12451,7 +12258,7 @@ class UnaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -12477,7 +12284,7 @@ class UnaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = DoNotCare() @@ -12507,7 +12314,7 @@ class UnaryOperation(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -12531,7 +12338,7 @@ class UnaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -12557,7 +12364,7 @@ class UnaryOperation(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() @@ -12586,21 +12393,13 @@ class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): orelse: Union[ Optional["Else"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], + MatchIfTrue[Optional[cst.Else]], DoNotCareSentinel, OneOf[ - Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], - ] + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] ], AllOf[ - Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Else]], bool]], - ] + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] ], ] = DoNotCare() leading_lines: Union[ @@ -12629,7 +12428,7 @@ class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -12653,7 +12452,7 @@ class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -12679,7 +12478,7 @@ class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -12703,9 +12502,7 @@ class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] = DoNotCare() -WithItemMatchType = Union[ - "WithItem", MetadataMatchType, MatchIfTrue[Callable[[cst.WithItem], 
bool]] -] +WithItemMatchType = Union["WithItem", MetadataMatchType, MatchIfTrue[cst.WithItem]] @dataclass(frozen=True, eq=False, unsafe_hash=False) @@ -12736,7 +12533,7 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.WithItem]], bool]], + MatchIfTrue[Sequence[cst.WithItem]], OneOf[ Union[ Sequence[ @@ -12760,7 +12557,7 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.WithItem]], bool]], + MatchIfTrue[Sequence[cst.WithItem]], ] ], AllOf[ @@ -12786,7 +12583,7 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.WithItem]], bool]], + MatchIfTrue[Sequence[cst.WithItem]], ] ], ] = DoNotCare() @@ -12799,20 +12596,20 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): asynchronous: Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], DoNotCareSentinel, OneOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], AllOf[ Union[ Optional["Asynchronous"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.Asynchronous]], bool]], + MatchIfTrue[Optional[cst.Asynchronous]], ] ], ] = DoNotCare() @@ -12842,7 +12639,7 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], OneOf[ Union[ Sequence[ @@ -12866,7 +12663,7 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], AllOf[ @@ -12892,7 +12689,7 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.EmptyLine]], bool]], + MatchIfTrue[Sequence[cst.EmptyLine]], ] ], ] = DoNotCare() @@ -12927,20 +12724,16 @@ class WithItem(BaseMatcherNode): asname: Union[ Optional["AsName"], MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + MatchIfTrue[Optional[cst.AsName]], DoNotCareSentinel, OneOf[ Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] ] ], AllOf[ Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Callable[[Optional[cst.AsName]], bool]], + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] ] ], ] = DoNotCare() @@ -12960,7 +12753,7 @@ BaseExpressionOrFromOrNoneMatchType = Union[ "From", None, MetadataMatchType, - MatchIfTrue[Callable[[Union[cst.BaseExpression, cst.From, None]], bool]], + MatchIfTrue[Union[cst.BaseExpression, cst.From, None]], ] @@ -12998,7 +12791,7 @@ class Yield(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], OneOf[ Union[ Sequence[ @@ -13022,7 +12815,7 @@ class Yield(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], AllOf[ @@ -13048,7 +12841,7 @@ class Yield(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.LeftParen]], bool]], + MatchIfTrue[Sequence[cst.LeftParen]], ] ], ] = 
DoNotCare() @@ -13078,7 +12871,7 @@ class Yield(BaseExpression, BaseMatcherNode): ] ], DoNotCareSentinel, - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], OneOf[ Union[ Sequence[ @@ -13102,7 +12895,7 @@ class Yield(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], AllOf[ @@ -13128,7 +12921,7 @@ class Yield(BaseExpression, BaseMatcherNode): ], ] ], - MatchIfTrue[Callable[[Sequence[cst.RightParen]], bool]], + MatchIfTrue[Sequence[cst.RightParen]], ] ], ] = DoNotCare() diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 149904e3..8eeb1fb3 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -49,7 +49,7 @@ class DoNotCareSentinel(Enum): _MatcherT = TypeVar("_MatcherT", covariant=True) -_CallableT = TypeVar("_CallableT", bound="Callable", covariant=True) +_MatchIfTrueT = TypeVar("_MatchIfTrueT", covariant=True) _BaseMatcherNodeSelfT = TypeVar("_BaseMatcherNodeSelfT", bound="BaseMatcherNode") _OtherNodeT = TypeVar("_OtherNodeT") _MetadataValueT = TypeVar("_MetadataValueT") @@ -472,7 +472,7 @@ class _ExtractMatchingNode(Generic[_MatcherT]): ) -class MatchIfTrue(Generic[_CallableT]): +class MatchIfTrue(Generic[_MatchIfTrueT]): """ Matcher that matches if its child callable returns ``True``. The child callable should take one argument which is the attribute on the LibCST node we are @@ -491,13 +491,13 @@ class MatchIfTrue(Generic[_CallableT]): you are passing to :func:`matches`. """ - def __init__(self, func: _CallableT) -> None: - # Without a cast, pyre thinks that self.func is not a function, even though - # it recognizes that it is a _CallableT bound to Callable. - self._func: Callable[[object], bool] = cast(Callable[[object], bool], func) + _func: Callable[[_MatchIfTrueT], bool] + + def __init__(self, func: Callable[[_MatchIfTrueT], bool]) -> None: + self._func = func @property - def func(self) -> Callable[[object], bool]: + def func(self) -> Callable[[_MatchIfTrueT], bool]: """ The function that we will call with a LibCST node in order to determine if we match. If the function returns ``True`` then we consider ourselves @@ -507,33 +507,33 @@ class MatchIfTrue(Generic[_CallableT]): def __or__( self, other: _OtherNodeT - ) -> "OneOf[Union[MatchIfTrue[_CallableT], _OtherNodeT]]": + ) -> "OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] # even though it has the types passed into it. return cast( - OneOf[Union[MatchIfTrue[_CallableT], _OtherNodeT]], OneOf(self, other) + OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]], OneOf(self, other) ) def __and__( self, other: _OtherNodeT - ) -> "AllOf[Union[MatchIfTrue[_CallableT], _OtherNodeT]]": + ) -> "AllOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": # Without a cast, pyre thinks that the below AllOf is type AllOf[object] # even though it has the types passed into it. return cast( - AllOf[Union[MatchIfTrue[_CallableT], _OtherNodeT]], AllOf(self, other) + AllOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]], AllOf(self, other) ) - def __invert__(self) -> "MatchIfTrue[_CallableT]": + def __invert__(self) -> "MatchIfTrue[_MatchIfTrueT]": # Construct a wrapped version of MatchIfTrue for typing simplicity. # Without the cast, pyre doesn't seem to think the lambda is valid. 
- return MatchIfTrue(cast(_CallableT, lambda val: not self._func(val))) + return MatchIfTrue(lambda val: not self._func(val)) def __repr__(self) -> str: # pyre-ignore Pyre doesn't believe that functions have a repr. return f"MatchIfTrue({repr(self._func)})" -def MatchRegex(regex: Union[str, Pattern[str]]) -> MatchIfTrue[Callable[[str], bool]]: +def MatchRegex(regex: Union[str, Pattern[str]]) -> MatchIfTrue[str]: """ Used as a convenience wrapper to :class:`MatchIfTrue` which allows for matching a string attribute against a regex. ``regex`` can be any regular @@ -1032,7 +1032,7 @@ def _matches_zero_nodes( matcher: Union[ BaseMatcherNode, _BaseWildcardNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, DoNotCareSentinel, ] @@ -1062,7 +1062,7 @@ def _sequence_matches( # noqa: C901 Union[ BaseMatcherNode, _BaseWildcardNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, DoNotCareSentinel, ] @@ -1288,7 +1288,7 @@ def _attribute_matches( # noqa: C901 Union[ BaseMatcherNode, _BaseWildcardNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], DoNotCareSentinel, ] ], @@ -1371,19 +1371,19 @@ def _node_matches( # noqa: C901 node: libcst.CSTNode, matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, _InverseOf[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], _ExtractMatchingNode[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], @@ -1454,19 +1454,19 @@ def _matches( node: Union[MaybeSentinel, libcst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, _InverseOf[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], _ExtractMatchingNode[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], @@ -1605,12 +1605,12 @@ class _FindAllVisitor(libcst.CSTVisitor): self, matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, _InverseOf[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], @@ -1636,7 +1636,7 @@ def _find_or_extract_all( tree: Union[MaybeSentinel, RemovalSentinel, libcst.CSTNode, meta.MetadataWrapper], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, # The inverse clause is left off of the public functions `findall` and # `extractall` because we play a dirty trick. 
We lie to the typechecker @@ -1647,7 +1647,7 @@ def _find_or_extract_all( _InverseOf[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], @@ -1687,9 +1687,7 @@ def _find_or_extract_all( def findall( tree: Union[MaybeSentinel, RemovalSentinel, libcst.CSTNode, meta.MetadataWrapper], - matcher: Union[ - BaseMatcherNode, MatchIfTrue[Callable[[object], bool]], _BaseMetadataMatcher - ], + matcher: Union[BaseMatcherNode, MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher], *, metadata_resolver: Optional[ Union[libcst.MetadataDependent, libcst.MetadataWrapper] @@ -1722,9 +1720,7 @@ def findall( def extractall( tree: Union[MaybeSentinel, RemovalSentinel, libcst.CSTNode, meta.MetadataWrapper], - matcher: Union[ - BaseMatcherNode, MatchIfTrue[Callable[[object], bool]], _BaseMetadataMatcher - ], + matcher: Union[BaseMatcherNode, MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher], *, metadata_resolver: Optional[ Union[libcst.MetadataDependent, libcst.MetadataWrapper] @@ -1764,12 +1760,12 @@ class _ReplaceTransformer(libcst.CSTTransformer): self, matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, _InverseOf[ Union[ BaseMatcherNode, - MatchIfTrue[Callable[[object], bool]], + MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, ] ], @@ -1866,9 +1862,7 @@ class _ReplaceTransformer(libcst.CSTTransformer): def replace( tree: Union[MaybeSentinel, RemovalSentinel, libcst.CSTNode, meta.MetadataWrapper], - matcher: Union[ - BaseMatcherNode, MatchIfTrue[Callable[[object], bool]], _BaseMetadataMatcher - ], + matcher: Union[BaseMatcherNode, MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher], replacement: Union[ MaybeSentinel, RemovalSentinel, diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 0c3b65a4..83f07739 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -570,7 +570,7 @@ class MatcherDecoratableTransformer(CSTTransformer): tree: Union[cst.MaybeSentinel, cst.RemovalSentinel, cst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[..., bool]], + MatchIfTrue[cst.CSTNode], MatchMetadata, MatchMetadataIfTrue, ], @@ -603,7 +603,7 @@ class MatcherDecoratableTransformer(CSTTransformer): tree: Union[cst.MaybeSentinel, cst.RemovalSentinel, cst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[..., bool]], + MatchIfTrue[cst.CSTNode], MatchMetadata, MatchMetadataIfTrue, ], @@ -622,7 +622,7 @@ class MatcherDecoratableTransformer(CSTTransformer): tree: Union[cst.MaybeSentinel, cst.RemovalSentinel, cst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[..., bool]], + MatchIfTrue[cst.CSTNode], MatchMetadata, MatchMetadataIfTrue, ], @@ -765,7 +765,7 @@ class MatcherDecoratableVisitor(CSTVisitor): tree: Union[cst.MaybeSentinel, cst.RemovalSentinel, cst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[..., bool]], + MatchIfTrue[cst.CSTNode], MatchMetadata, MatchMetadataIfTrue, ], @@ -798,7 +798,7 @@ class MatcherDecoratableVisitor(CSTVisitor): tree: Union[cst.MaybeSentinel, cst.RemovalSentinel, cst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[..., bool]], + MatchIfTrue[cst.CSTNode], MatchMetadata, MatchMetadataIfTrue, ], @@ -817,7 +817,7 @@ class MatcherDecoratableVisitor(CSTVisitor): tree: Union[cst.MaybeSentinel, cst.RemovalSentinel, cst.CSTNode], matcher: Union[ BaseMatcherNode, - MatchIfTrue[Callable[..., bool]], + MatchIfTrue[cst.CSTNode], 
MatchMetadata, MatchMetadataIfTrue, ], From 39607edb2d6bf99f3cc7ffe6ff2e63f64b363bca Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 21 Sep 2021 19:39:34 +0100 Subject: [PATCH 136/632] Bump version to 0.3.21 (#524) --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 73f79ea1..7ad6de4a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +# 0.3.21 - 2021-09-21 + +## Fixed +- Fix pyre command for type inference provider [#523](https://github.com/Instagram/LibCST/pull/523) + +## Updated +- Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same [#508]((https://github.com/Instagram/LibCST/pull/508) +- Rewrite the MatchIfTrue type to be generic on _MatchIfTrueT [#512](https://github.com/Instagram/LibCST/pull/512) +- Add python3.9 to the CI [#506](https://github.com/Instagram/LibCST/pull/506) +- Various CI changes [#471](https://github.com/Instagram/LibCST/pull/471) [#510](https://github.com/Instagram/LibCST/pull/510) [#505](https://github.com/Instagram/LibCST/pull/505) [#515](https://github.com/Instagram/LibCST/pull/515) [#516](https://github.com/Instagram/LibCST/pull/516) + # 0.3.20 - 2021-08-09 ## Fixed From 71b8002ccadd89d74a6b95e587226745aeccb311 Mon Sep 17 00:00:00 2001 From: John Reese Date: Tue, 28 Sep 2021 09:47:12 -0700 Subject: [PATCH 137/632] Add --indent-string option to `libcst.tool print` (#525) Allows passing a custom indent string, like ". ", for easier visual parsing of the resulting tree. --- libcst/tool.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/libcst/tool.py b/libcst/tool.py index 44fd367a..e7612bd4 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -136,6 +136,7 @@ def _node_repr_recursive( # noqa: C901 child_tokens.extend( _node_repr_recursive( value, + indent=indent, show_whitespace=show_whitespace, show_defaults=show_defaults, show_syntax=show_syntax, @@ -152,6 +153,7 @@ def _node_repr_recursive( # noqa: C901 list_tokens.extend( _node_repr_recursive( v, + indent=indent, show_whitespace=show_whitespace, show_defaults=show_defaults, show_syntax=show_syntax, @@ -247,6 +249,11 @@ def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: action="store_true", help="Show values that exist only for syntax, like commas or semicolons", ) + parser.add_argument( + "--indent-string", + default=_DEFAULT_INDENT, + help=f"String to use for indenting levels, defaults to {_DEFAULT_INDENT!r}", + ) parser.add_argument( "-p", "--python-version", @@ -279,6 +286,7 @@ def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: print( dump( tree, + indent=args.indent_string, show_defaults=args.show_defaults, show_syntax=args.show_syntax, show_whitespace=args.show_whitespace, From 13485d3c2f5bb3e4b6219dac6ba7bb08c851f21f Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Fri, 1 Oct 2021 23:40:47 +0800 Subject: [PATCH 138/632] Fix documentation typos (#527) --- docs/source/matchers.rst | 4 ++-- docs/source/matchers_tutorial.ipynb | 2 +- docs/source/tutorial.ipynb | 4 ++-- libcst/matchers/_matcher_base.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/source/matchers.rst b/docs/source/matchers.rst index ae67516a..a89a1a68 100644 --- a/docs/source/matchers.rst +++ b/docs/source/matchers.rst @@ -13,7 +13,7 @@ defining what attributes on a node matter when matching against predefined patte To accomplish this, a matcher has been created which corresponds to each LibCST node documented in :ref:`libcst-nodes`. 
Matchers default each of their attributes -to the special sentinal matcher :func:`~libcst.matchers.DoNotCare`. When constructing +to the special sentinel matcher :func:`~libcst.matchers.DoNotCare`. When constructing a matcher, you can initialize the node with only the values of attributes that you are concerned with, leaving the rest of the attributes set to :func:`~libcst.matchers.DoNotCare` in order to skip comparing against them. @@ -79,7 +79,7 @@ Traversal Order ^^^^^^^^^^^^^^^ Visit and leave functions created using :func:`~libcst.matchers.visit` or -:func:`~libcst.matchers.leave` follow the traveral order rules laid out in +:func:`~libcst.matchers.leave` follow the traversal order rules laid out in LibCST's visitor :ref:`libcst-visitor-traversal` with one additional rule. Any visit function created using the :func:`~libcst.matchers.visit` decorator will be called **before** a ``visit_`` function if it is defined for your visitor. diff --git a/docs/source/matchers_tutorial.ipynb b/docs/source/matchers_tutorial.ipynb index 1add7bca..6222bf7f 100644 --- a/docs/source/matchers_tutorial.ipynb +++ b/docs/source/matchers_tutorial.ipynb @@ -234,7 +234,7 @@ "into your :ref:`libcst-visitors` in order to identify which nodes you care ", "about. Matcher :ref:`libcst-matcher-decorators` help reduce that boilerplate.\n", "\n", - "Say you wanted to invert the the boolean literals in functions which ", + "Say you wanted to invert the boolean literals in functions which ", "match the above ``best_is_call_with_booleans``. You could build something ", "that looks like the following:" ] diff --git a/docs/source/tutorial.ipynb b/docs/source/tutorial.ipynb index 29e59808..a7c3cd6b 100644 --- a/docs/source/tutorial.ipynb +++ b/docs/source/tutorial.ipynb @@ -113,7 +113,7 @@ " self.stack: List[Tuple[str, ...]] = []\n", " # store the annotations\n", " self.annotations: Dict[\n", - " Tuple[str, ...], # key: tuple of cononical class/function name\n", + " Tuple[str, ...], # key: tuple of canonical class/function name\n", " Tuple[cst.Parameters, Optional[cst.Annotation]], # value: (params, returns)\n", " ] = {}\n", "\n", @@ -140,7 +140,7 @@ " self.stack: List[Tuple[str, ...]] = []\n", " # store the annotations\n", " self.annotations: Dict[\n", - " Tuple[str, ...], # key: tuple of cononical class/function name\n", + " Tuple[str, ...], # key: tuple of canonical class/function name\n", " Tuple[cst.Parameters, Optional[cst.Annotation]], # value: (params, returns)\n", " ] = annotations\n", "\n", diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 8eeb1fb3..9dc49400 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -1711,7 +1711,7 @@ def findall( or a :class:`OneOf`/:class:`AllOf` special matcher. Unlike :func:`matches`, it can also be a :class:`MatchIfTrue` or :func:`DoesNotMatch` matcher, since we are traversing the tree looking for matches. It cannot be a :class:`AtLeastN` or - :class:`AtMostN` matcher because these types are wildcards which can only be usedi + :class:`AtMostN` matcher because these types are wildcards which can only be used inside sequences. """ nodes, _ = _find_or_extract_all(tree, matcher, metadata_resolver=metadata_resolver) From 1e0d4841bfcfc2a399a826208c2d1b5419ca4c44 Mon Sep 17 00:00:00 2001 From: Zac Hatfield-Dodds Date: Thu, 21 Oct 2021 16:25:59 +1100 Subject: [PATCH 139/632] Fix m.OneOf() docs (#529) Closes #345. 
--- libcst/matchers/_matcher_base.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 9dc49400..28f5c5e9 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -210,17 +210,6 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): m.Name("True") | m.Name("False") - Note that a :class:`OneOf` matcher can be used anywhere you are defining - a matcher attribute. So, an alternate form to the first example looks like:: - - m.Name(m.OneOf("True", "False")) - - A downside to the alternate form is that you can no longer use Python's - bitwise or operator to construct the :class:`OneOf` since it is not defined - for strings. However, the upside is that it is more concise. We do not - recommend any one form over the other, and leave it up to you to decide what - is best for your use case. - """ def __init__(self, *options: Union[_MatcherT, "OneOf[_MatcherT]"]) -> None: From 14eff6aaf5a3019547ea2e0f90c012732bb4e4f6 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 27 Oct 2021 11:08:38 -0400 Subject: [PATCH 140/632] Remove a dead method in ApplyTypeAnnotationsVisitor (#533) I'm not really sure how the method got there, but it was calling itself recursively... fortunately, it was also overwritten by an identically named method so it was actually impossible to access. --- libcst/codemod/visitors/_apply_type_annotations.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 9b170de1..129ec208 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -257,17 +257,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # insert top-level annotations. self.import_statements: List[cst.ImportFrom] = [] - @staticmethod - def store_stub_in_context( - context: CodemodContext, - stub: cst.Module, - overwrite_existing_annotations: bool = False, - ) -> None: - # deprecated, should be removed in 0.4 release. - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, stub, overwrite_existing_annotations - ) - @staticmethod def store_stub_in_context( context: CodemodContext, From 1f169b8b8d2fb15daf3d81b60ba106bd5fad225c Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 28 Oct 2021 12:46:57 -0400 Subject: [PATCH 141/632] Count the information we add in ApplyTypeAnnotationsVisitor (#537) Refactor ApplyTypeAnnotationsVisitor so that all annotation information is added via a smart constructor method starting with `_apply_annotation_to`. This makes it much easier to skim the code and understand where annotations are actually added with a simple forward search. Then, add an AnnotationCounts dataclass and count up all the annotations we add inside the transform. This should be helpful for a few reasons: - First, it just makes counting the annotations easier. Prior to this change, we would have to run some separate command to count annotations before and after a codemod, which is not as convenient as doing it directly, and would also fail to account for cases where we changed an annotation. - Second, I want to be able to avoid altering the import statements in cases where we never actually made any changes. Having annotation counts will help us do this - we can just return the original tree (without import changes) in that situtation. 
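As a rough sketch of what the counts give a caller once this patch is in place (the stub and source strings below are invented; the attribute and method names follow the diff in this patch):

```
from libcst import parse_module
from libcst.codemod import CodemodContext
from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor

context = CodemodContext()
ApplyTypeAnnotationsVisitor.store_stub_in_context(
    context, parse_module("def f(x: int) -> str: ...\n")
)
visitor = ApplyTypeAnnotationsVisitor(context=context)
annotated = visitor.transform_module(parse_module("def f(x):\n    return str(x)\n"))

counts = visitor.annotation_counts
# For this toy stub, one parameter and one return annotation should be counted.
print(counts.parameter_annotations, counts.return_annotations)
print(counts.applied_changes())  # True once anything was annotated
```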
``` > python -m unittest libcst.codemod.visitors.tests.test_apply_type_annotations.TestApplyAnnotationsVisitor ................................................ ---------------------------------------------------------------------- Ran 48 tests in 1.773s OK ``` ( --- .../visitors/_apply_type_annotations.py | 81 ++++++++++- .../tests/test_apply_type_annotations.py | 134 +++++++++++++++++- 2 files changed, 208 insertions(+), 7 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 129ec208..1d1b1480 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -200,6 +200,24 @@ class Annotations: class_definitions: Dict[str, cst.ClassDef] = field(default_factory=dict) +@dataclass +class AnnotationCounts: + global_annotations: int = 0 + attribute_annotations: int = 0 + parameter_annotations: int = 0 + return_annotations: int = 0 + classes_added: int = 0 + + def applied_changes(self): + return ( + self.global_annotations + + self.attribute_annotations + + self.parameter_annotations + + self.return_annotations + + self.classes_added + ) > 0 + + class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): """ Apply type annotations to a source module using the given stub mdules. @@ -257,6 +275,11 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # insert top-level annotations. self.import_statements: List[cst.ImportFrom] = [] + # We use this to report annotations added, as well as to determine + # whether to abandon the codemod in edge cases where we may have + # only made changes to the imports. + self.annotation_counts: AnnotationCounts = AnnotationCounts() + @staticmethod def store_stub_in_context( context: CodemodContext, @@ -306,6 +329,40 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): tree_with_imports = AddImportsVisitor(self.context).transform_module(tree) return tree_with_imports.visit(self) + # smart constructors: all applied annotations happen via one of these + + def _apply_annotation_to_attribute_or_global( + self, + name: str, + annotation: cst.Annotation, + value: Optional[cst.BaseExpression], + ) -> cst.AnnAssign: + if len(self.qualifier) == 0: + self.annotation_counts.global_annotations += 1 + else: + self.annotation_counts.attribute_annotations += 1 + return cst.AnnAssign(cst.Name(name), annotation, value) + + def _apply_annotation_to_parameter( + self, + parameter: cst.Param, + annotation: cst.Annotation, + ) -> cst.Param: + self.annotation_counts.parameter_annotations += 1 + return parameter.with_changes( + annotation=annotation, + ) + + def _apply_annotation_to_return( + self, + function_def: cst.FunctionDef, + annotation: cst.Annotation, + ) -> cst.FunctionDef: + self.annotation_counts.return_annotations += 1 + return function_def.with_changes(returns=annotation) + + # private methods used in the visit and leave methods + def _qualifier_name(self) -> str: return ".".join(self.qualifier) @@ -333,7 +390,11 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self._qualifier_name() ] self.qualifier.pop() - return cst.AnnAssign(cst.Name(name), annotation, node.value) + return self._apply_annotation_to_attribute_or_global( + name=name, + annotation=annotation, + value=node.value, + ) else: self.qualifier.pop() return updated_node @@ -388,8 +449,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): if key in parameter_annotations and ( self.overwrite_existing_annotations or not 
parameter.annotation ): - parameter = parameter.with_changes( - annotation=parameter_annotations[key] + parameter = self._apply_annotation_to_parameter( + parameter=parameter, + annotation=parameter_annotations[key], ) annotated_parameters.append(parameter) return annotated_parameters @@ -431,6 +493,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): *statements[1:], ] + # transform API methods + def visit_ClassDef(self, node: cst.ClassDef) -> None: self.qualifier.append(node.name.value) self.visited_classes.add(node.name.value) @@ -459,8 +523,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.overwrite_existing_annotations and function_annotation.returns ) if set_return_annotation: - updated_node = updated_node.with_changes( - returns=function_annotation.returns + updated_node = self._apply_annotation_to_return( + function_def=updated_node, annotation=function_annotation.returns ) # Don't override default values when annotating functions new_parameters = self._update_parameters(function_annotation, updated_node) @@ -510,9 +574,14 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): statements_after_imports = self._insert_empty_line(statements_after_imports) for name, annotation in self.toplevel_annotations.items(): - annotated_assign = cst.AnnAssign(cst.Name(name), annotation, None) + annotated_assign = self._apply_annotation_to_attribute_or_global( + name=name, + annotation=annotation, + value=None, + ) toplevel_statements.append(cst.SimpleStatementLine([annotated_assign])) + self.annotation_counts.classes_added = len(fresh_class_definitions) toplevel_statements.extend(fresh_class_definitions) return updated_node.with_changes( diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 4a93b082..f2b28953 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -11,7 +11,10 @@ from typing import Dict, Type from libcst import parse_module from libcst.codemod import Codemod, CodemodContext, CodemodTest -from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor +from libcst.codemod.visitors._apply_type_annotations import ( + ApplyTypeAnnotationsVisitor, + AnnotationCounts, +) from libcst.testing.utils import data_provider @@ -954,3 +957,132 @@ class TestApplyAnnotationsVisitor(CodemodTest): after=after, overwrite_existing_annotations=True, ) + + @data_provider( + { + "test_counting_parameters_and_returns": ( + """ + def f(counted: int, not_counted) -> Counted: ... + + def g(not_counted: int, counted: str) -> Counted: ... + + def h(counted: int) -> NotCounted: ... + + def not_in_module(x: int, y: int) -> str: ... + """, + """ + def f(counted, not_counted): + return Counted() + + def g(not_counted: int, counted): + return Counted() + + def h(counted) -> NotCounted: + return Counted() + """, + """ + def f(counted: int, not_counted) -> Counted: + return Counted() + + def g(not_counted: int, counted: str) -> Counted: + return Counted() + + def h(counted: int) -> NotCounted: + return Counted() + """, + AnnotationCounts( + parameter_annotations=3, + return_annotations=2, + ), + True, + ), + "test_counting_globals_classes_and_attributes": ( + """ + global0: int = ... 
+ global1: int + + class InModule: + attr_will_be_found: int + attr_will_not_be_found: int + + class NotInModule: + attr: int + """, + """ + global0 = 1 + global1, global2 = (1, 1) + + class InModule: + attr_will_be_found = 0 + def __init__(self): + self.attr_will_not_be_found = 1 + """, + """ + global1: int + + class NotInModule: + attr: int + + global0: int = 1 + global1, global2 = (1, 1) + + class InModule: + attr_will_be_found: int = 0 + def __init__(self): + self.attr_will_not_be_found = 1 + + """, + AnnotationCounts( + global_annotations=2, + attribute_annotations=1, + classes_added=1, + ), + True, + ), + "test_counting_no_changes": ( + """ + class C: + attr_will_not_be_found: bar.X + """, + """ + class C: + def __init__(self): + self.attr_will_not_be_found = None + """, + # TODO: use the annotation counts to avoid adding + # the import in this case. + """ + from bar import X + + class C: + def __init__(self): + self.attr_will_not_be_found = None + """, + AnnotationCounts(), + False, + ), + } + ) + def test_count_annotations( + self, + stub: str, + before: str, + after: str, + annotation_counts: AnnotationCounts, + applied_changes: False, + ): + stub = self.make_fixture_data(stub) + before = self.make_fixture_data(before) + after = self.make_fixture_data(after) + + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context=context, stub=parse_module(stub) + ) + visitor = ApplyTypeAnnotationsVisitor(context=context) + + output_code = visitor.transform_module(parse_module(before)).code + + self.assertEqual(after, output_code) + self.assertEqual(str(annotation_counts), str(visitor.annotation_counts)) + self.assertEqual(applied_changes, visitor.annotation_counts.applied_changes()) From 3743c702dce203eb677ab5c38e14f887998143f1 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 28 Oct 2021 13:46:18 -0400 Subject: [PATCH 142/632] Use QualifiedNameProvider to handle stub types (#536) The existing TypeCollector visitor logic attempted to fold actual imports from stubs together with the module we were annotating, and separately do nice things with the names of types so that we could parse stubs written either with various sorts of proper imports *or* stubs written using bare fully-qualified type names (which isn't actually legal python, but is easy to produce from automated tools like `pyre infer`). In this commit I simplify things in principle - meaning the data flow is simpler, although the code is still similarly complex - by using `QualifiedNameProvider` plus a fallback to `get_full_name_for_node` to handle all cases via fully-qualified names, so that the way a stub chooses to lay out its imports is no longer relevant to how we will understand it. As a result, we can scrap a whole test suite where we were understanding edge cases in the import handling, and moreover one of the weird unsupported edge cases is now well supported. The tests got simpler because some edge cases no longer matter (the whole imports test is no longer relevant), and a couple of weird edge cases were fixed. I ran tests with ``` python -m unittest libcst.codemod.visitors.tests.test_apply_type_annotations.TestApplyAnnotationsVisitor ``` I tried to make this change minimal in that I preserve the existing data flow, so that it's easy to review. 
But it's worth considering whether to follow up with a diff where we change the TypeAnnotationCollector into a *transform* rather than a *visitor*, because that would allow us to scrap quite a bit of logic - all we would need to know is a couple of bits of context from higher up in the tree and we could process Names and Attributes without needing all this recursion. --- .../visitors/_apply_type_annotations.py | 186 ++++++++++++------ .../tests/test_apply_type_annotations.py | 45 +---- 2 files changed, 126 insertions(+), 105 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 1d1b1480..2ac44c02 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -8,12 +8,16 @@ from dataclasses import dataclass, field from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst -from libcst import matchers as m from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.helpers import get_full_name_for_node +from libcst.metadata import QualifiedNameProvider, PositionProvider + + +NameOrAttribute = Union[cst.Name, cst.Attribute] +NAME_OR_ATTRIBUTE = (cst.Name, cst.Attribute) def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[str]: @@ -50,6 +54,11 @@ class TypeCollector(cst.CSTVisitor): Collect type annotations from a stub module. """ + METADATA_DEPENDENCIES = ( + PositionProvider, + QualifiedNameProvider, + ) + def __init__(self, existing_imports: Set[str], context: CodemodContext) -> None: # Qualifier for storing the canonical name of the current function. self.qualifier: List[str] = [] @@ -62,7 +71,23 @@ class TypeCollector(cst.CSTVisitor): def visit_ClassDef(self, node: cst.ClassDef) -> None: self.qualifier.append(node.name.value) - self.class_definitions[node.name.value] = node + new_bases = [] + for base in node.bases: + value = base.value + if isinstance(value, NAME_OR_ATTRIBUTE): + new_value = self._handle_NameOrAttribute(value) + elif isinstance(base.value, cst.Subscript): + new_value = self._handle_Subscript(value) + else: + start = self.get_metadata(PositionProvider, node).start + raise ValueError( + "Invalid type used as base class in stub file at " + + f"{start.line}:{start.column}. Only subscripts, names, and " + + "attributes are valid base classes for static typing." 
+ ) + new_bases.append(base.with_changes(value=new_value)) + + self.class_definitions[node.name.value] = node.with_changes(bases=new_bases) def leave_ClassDef(self, original_node: cst.ClassDef) -> None: self.qualifier.pop() @@ -71,11 +96,9 @@ class TypeCollector(cst.CSTVisitor): self.qualifier.append(node.name.value) returns = node.returns return_annotation = ( - self._create_import_from_annotation(returns) - if returns is not None - else None + self._handle_Annotation(annotation=returns) if returns is not None else None ) - parameter_annotations = self._import_parameter_annotations(node.params) + parameter_annotations = self._handle_Parameters(node.params) self.function_annotations[".".join(self.qualifier)] = FunctionAnnotation( parameters=parameter_annotations, returns=return_annotation ) @@ -90,102 +113,137 @@ class TypeCollector(cst.CSTVisitor): name = get_full_name_for_node(node.target) if name is not None: self.qualifier.append(name) - annotation_value = self._create_import_from_annotation(node.annotation) + annotation_value = self._handle_Annotation(annotation=node.annotation) self.attribute_annotations[".".join(self.qualifier)] = annotation_value return True def leave_AnnAssign(self, original_node: cst.AnnAssign) -> None: self.qualifier.pop() - def visit_ImportFrom(self, node: cst.ImportFrom) -> None: - module = node.module - names = node.names + def _get_unique_qualified_name(self, node: cst.CSTNode) -> str: + names = [q.name for q in self.get_metadata(QualifiedNameProvider, node)] + if len(names) == 0: + # we hit this branch if the stub is directly using a fully + # qualified name, which is not technically valid python but is + # convenient to allow. + return get_full_name_for_node(node) + elif len(names) == 1: + return names[0] + else: + start = self.get_metadata(PositionProvider, node).start + raise ValueError( + "Could not resolve a unique qualified name for type " + + f"{get_full_name_for_node(node)} at {start.line}:{start.column}. " + + f"Candidate names were: {names!r}" + ) - # module is None for relative imports like `from .. import foo`. - # We ignore these for now. - if module is None or isinstance(names, cst.ImportStar): - return - module_name = get_full_name_for_node(module) - if module_name is not None: - for import_name in _get_import_alias_names(names): - AddImportsVisitor.add_needed_import( - self.context, module_name, import_name - ) + def _get_qualified_name_and_dequalified_node( + self, + node: Union[cst.Name, cst.Attribute], + ) -> Tuple[str, Union[cst.Name, cst.Attribute]]: + qualified_name = self._get_unique_qualified_name(node) + dequalified_node = node.attr if isinstance(node, cst.Attribute) else node + return qualified_name, dequalified_node - def _add_annotation_to_imports( - self, annotation: cst.Attribute + def _handle_qualification_and_should_qualify(self, qualified_name: str) -> bool: + """ + Basd on a qualified name and the existing module imports, record that + we need to add an import if necessary and return whether or not we + should use the qualified name due to a preexisting import. + """ + split_name = qualified_name.split(".") + if len(split_name) > 1 and qualified_name not in self.existing_imports: + module, target = ".".join(split_name[:-1]), split_name[-1] + if module == "builtins": + return False + elif module in self.existing_imports: + return True + else: + AddImportsVisitor.add_needed_import(self.context, module, target) + return False + + # Handler functions. 
+ # + # Each of these does one of two things, possibly recursively, over some + # valid CST node for a static type: + # - process the qualified name and ensure we will add necessary imports + # - dequalify the node + + def _handle_NameOrAttribute( + self, + node: NameOrAttribute, ) -> Union[cst.Name, cst.Attribute]: - key = get_full_name_for_node(annotation.value) - if key is not None: - # Don't attempt to re-import existing imports. - if key in self.existing_imports: - return annotation - import_name = get_full_name_for_node(annotation.attr) - if import_name is not None: - AddImportsVisitor.add_needed_import(self.context, key, import_name) - return annotation.attr + ( + qualified_name, + dequalified_node, + ) = self._get_qualified_name_and_dequalified_node(node) + should_qualify = self._handle_qualification_and_should_qualify(qualified_name) + if should_qualify: + return node + else: + return dequalified_node def _handle_Index(self, slice: cst.Index, node: cst.Subscript) -> cst.Subscript: value = slice.value if isinstance(value, cst.Subscript): - new_slice = slice.with_changes(value=self._handle_Subscript(value)) - return node.with_changes(slice=new_slice) + return slice.with_changes(value=self._handle_Subscript(value)) elif isinstance(value, cst.Attribute): - new_slice = slice.with_changes(value=self._add_annotation_to_imports(value)) - return node.with_changes(slice=new_slice) + return slice.with_changes(value=self._handle_NameOrAttribute(value)) else: - return node + return slice def _handle_Subscript(self, node: cst.Subscript) -> cst.Subscript: + value = node.value + if isinstance(value, NAME_OR_ATTRIBUTE): + new_node = node.with_changes(value=self._handle_NameOrAttribute(value)) + else: + raise ValueError("Expected any indexed type to have") + if self._get_unique_qualified_name(node) in ("Type", "typing.Type"): + # Note: we are intentionally not handling qualification of + # anything inside `Type` because it's common to have nested + # classes, which we cannot currently distinguish from classes + # coming from other modules, appear here. 
+ return new_node slice = node.slice - if m.matches(node.value, m.Name(value="Type")): - return node - if isinstance(slice, list): + if isinstance(slice, tuple): new_slice = [] for item in slice: value = item.slice.value - if isinstance(value, cst.Attribute): - name = self._add_annotation_to_imports(item.slice.value) + if isinstance(value, NAME_OR_ATTRIBUTE): + name = self._handle_NameOrAttribute(item.slice.value) new_index = item.slice.with_changes(value=name) new_slice.append(item.with_changes(slice=new_index)) else: - if isinstance(item.slice, cst.Index) and not isinstance( - item.slice.value, cst.Name - ): + if isinstance(item.slice, cst.Index): new_index = item.slice.with_changes( value=self._handle_Index(item.slice, item) ) - item = item.with_changes(slice=new_index, comma=None) + item = item.with_changes(slice=new_index) new_slice.append(item) - return node.with_changes(slice=new_slice) + return new_node.with_changes(slice=tuple(new_slice)) elif isinstance(slice, cst.Index): - return self._handle_Index(slice, node) + new_slice = self._handle_Index(slice) + return new_node.with_changes(slice=new_slice) else: - return node + return new_node - def _create_import_from_annotation(self, returns: cst.Annotation) -> cst.Annotation: - annotation = returns.annotation - if isinstance(annotation, cst.Attribute): - attr = self._add_annotation_to_imports(annotation) - return cst.Annotation(annotation=attr) - if isinstance(annotation, cst.Subscript): - value = annotation.value - if m.matches(value, m.Name(value="Type")): - return returns - return cst.Annotation(annotation=self._handle_Subscript(annotation)) + def _handle_Annotation(self, annotation: cst.Annotation) -> cst.Annotation: + node = annotation.annotation + if isinstance(node, cst.SimpleString): + return annotation + elif isinstance(node, cst.Subscript): + return cst.Annotation(annotation=self._handle_Subscript(node)) else: - return returns + return cst.Annotation(annotation=self._handle_NameOrAttribute(node)) - def _import_parameter_annotations( - self, parameters: cst.Parameters - ) -> cst.Parameters: + def _handle_Parameters(self, parameters: cst.Parameters) -> cst.Parameters: def update_annotations(parameters: Sequence[cst.Param]) -> List[cst.Param]: updated_parameters = [] for parameter in list(parameters): annotation = parameter.annotation if annotation is not None: parameter = parameter.with_changes( - annotation=self._create_import_from_annotation(annotation) + annotation=self._handle_Annotation(annotation=annotation) ) updated_parameters.append(parameter) return updated_parameters @@ -321,7 +379,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.overwrite_existing_annotations or overwrite_existing_annotations ) visitor = TypeCollector(existing_import_names, self.context) - stub.visit(visitor) + cst.MetadataWrapper(stub).visit(visitor) self.annotations.function_annotations.update(visitor.function_annotations) self.annotations.attribute_annotations.update(visitor.attribute_annotations) self.annotations.class_definitions.update(visitor.class_definitions) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index f2b28953..4a63ab29 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -56,45 +56,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) self.assertCodemod(before, after, context_override=context) - @data_provider( - { - 
"supported_cases": ( - """ - from __future__ import annotations - from foo import Foo - from baz import Baz - """, - """ - from foo import Bar - import bar - """, - """ - from __future__ import annotations - from foo import Foo, Bar - import bar - from baz import Baz - """, - ), - "unsupported_cases": ( - """ - from Foo import foo as bar - import foo - from .. import baz - from boo import * - """, - """ - """, - # This is a bug, it would be better to just ignor aliased - # imports than to add them incorrectly. - """ - from Foo import bar - """, - ), - } - ) - def test_merge_module_imports(self, stub: str, before: str, after: str) -> None: - self.run_simple_test_case(stub=stub, before=before, after=after) - @data_provider( { "simple": ( @@ -361,7 +322,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): pass """, ), - "UNSUPPORTED_add_imports_for_generics": ( + "add_imports_for_generics": ( """ def foo(x: int) -> typing.Optional[Example]: ... """, @@ -370,7 +331,9 @@ class TestApplyAnnotationsVisitor(CodemodTest): pass """, """ - def foo(x: int) -> typing.Optional[Example]: + from typing import Optional + + def foo(x: int) -> Optional[Example]: pass """, ), From 87625d02b6cb321c9c29ba1c67d81ce954a1a396 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 28 Oct 2021 14:25:28 -0400 Subject: [PATCH 143/632] Do not add imports if we added no type info in ATAV In ApplyTypeAnnotationsVisitor, there are edge cases where we might have changed the module imports even though we never wound up applying any type annotations. This will become even more common if we support adding `from __future__ import annotations`, which I would like to do soon. To handle this, we can simply return the original tree from `transform_module_impl` (discarding any changes from either `self` or `AddImportsVisitor`) whenever there are no changes in `self.annotation_counts`. I updated the no-annotations-changed test to reflect this: ``` > python -m unittest libcst.codemod.visitors.tests.test_apply_type_annotations.TestApplyAnnotationsVisitor ............................................... 
---------------------------------------------------------------------- Ran 47 tests in 2.312s OK ``` --- libcst/codemod/visitors/_apply_type_annotations.py | 10 ++++++++-- .../visitors/tests/test_apply_type_annotations.py | 8 ++------ 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 2ac44c02..0e0244e2 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -266,7 +266,7 @@ class AnnotationCounts: return_annotations: int = 0 classes_added: int = 0 - def applied_changes(self): + def any_changes(self): return ( self.global_annotations + self.attribute_annotations @@ -385,7 +385,13 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.annotations.class_definitions.update(visitor.class_definitions) tree_with_imports = AddImportsVisitor(self.context).transform_module(tree) - return tree_with_imports.visit(self) + tree_with_changes = tree_with_imports.visit(self) + + # don't modify the imports if we didn't actually add any type information + if self.annotation_counts.any_changes(): + return tree_with_changes + else: + return tree # smart constructors: all applied annotations happen via one of these diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 4a63ab29..e337f41a 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1012,11 +1012,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): def __init__(self): self.attr_will_not_be_found = None """, - # TODO: use the annotation counts to avoid adding - # the import in this case. """ - from bar import X - class C: def __init__(self): self.attr_will_not_be_found = None @@ -1032,7 +1028,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): before: str, after: str, annotation_counts: AnnotationCounts, - applied_changes: False, + any_changes: False, ): stub = self.make_fixture_data(stub) before = self.make_fixture_data(before) @@ -1048,4 +1044,4 @@ class TestApplyAnnotationsVisitor(CodemodTest): self.assertEqual(after, output_code) self.assertEqual(str(annotation_counts), str(visitor.annotation_counts)) - self.assertEqual(applied_changes, visitor.annotation_counts.applied_changes()) + self.assertEqual(any_changes, visitor.annotation_counts.any_changes()) From a352d56970f56e884e04bf8e09441e3d72647580 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 28 Oct 2021 15:24:41 -0400 Subject: [PATCH 144/632] Support use_future_annotations in ApplyTypeAnnotationsVisitor Note: I'm pushing this because it works, but I actually want to add annotation counting first and then modify the code so that we only add the import if an annotation was actually included. 
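The diff below threads the new `use_future_annotations` flag through both `store_stub_in_context` and the visitor constructor. A sketch of how a caller could opt in, assuming the invented stub and source strings shown here:

```
from libcst import parse_module
from libcst.codemod import CodemodContext
from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor

context = CodemodContext()
ApplyTypeAnnotationsVisitor.store_stub_in_context(
    context,
    parse_module("def f() -> bool: ...\n"),
    use_future_annotations=True,
)
result = ApplyTypeAnnotationsVisitor(context=context).transform_module(
    parse_module("def f():\n    return True\n")
)
# Per the new test case, the output gains `from __future__ import annotations`
# in addition to the `-> bool` return annotation.
print(result.code)
```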
--- .../visitors/_apply_type_annotations.py | 26 +++++++++-- .../tests/test_apply_type_annotations.py | 46 ++++++++++++++++++- 2 files changed, 66 insertions(+), 6 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 0e0244e2..cc53d656 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -266,7 +266,7 @@ class AnnotationCounts: return_annotations: int = 0 classes_added: int = 0 - def any_changes(self): + def any_changes_applied(self): return ( self.global_annotations + self.attribute_annotations @@ -318,6 +318,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): context: CodemodContext, annotations: Optional[Annotations] = None, overwrite_existing_annotations: bool = False, + use_future_annotations: bool = False, ) -> None: super().__init__(context) # Qualifier for storing the canonical name of the current function. @@ -328,6 +329,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.toplevel_annotations: Dict[str, cst.Annotation] = {} self.visited_classes: Set[str] = set() self.overwrite_existing_annotations = overwrite_existing_annotations + self.use_future_annotations = use_future_annotations # We use this to determine the end of the import block so that we can # insert top-level annotations. @@ -343,6 +345,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): context: CodemodContext, stub: cst.Module, overwrite_existing_annotations: bool = False, + use_future_annotations: bool = False, ) -> None: """ Store a stub module in the :class:`~libcst.codemod.CodemodContext` so @@ -358,6 +361,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): context.scratch[ApplyTypeAnnotationsVisitor.CONTEXT_KEY] = ( stub, overwrite_existing_annotations, + use_future_annotations, ) def transform_module_impl(self, tree: cst.Module) -> cst.Module: @@ -374,21 +378,35 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ApplyTypeAnnotationsVisitor.CONTEXT_KEY ) if context_contents is not None: - stub, overwrite_existing_annotations = context_contents + ( + stub, + overwrite_existing_annotations, + use_future_annotations, + ) = context_contents self.overwrite_existing_annotations = ( self.overwrite_existing_annotations or overwrite_existing_annotations ) + self.use_future_annotations = ( + self.use_future_annotations or use_future_annotations + ) visitor = TypeCollector(existing_import_names, self.context) cst.MetadataWrapper(stub).visit(visitor) self.annotations.function_annotations.update(visitor.function_annotations) self.annotations.attribute_annotations.update(visitor.attribute_annotations) self.annotations.class_definitions.update(visitor.class_definitions) - tree_with_imports = AddImportsVisitor(self.context).transform_module(tree) + tree_with_imports = AddImportsVisitor( + context=self.context, + imports=( + [("__future__", "annotations", None)] + if self.use_future_annotations + else () + ), + ).transform_module(tree) tree_with_changes = tree_with_imports.visit(self) # don't modify the imports if we didn't actually add any type information - if self.annotation_counts.any_changes(): + if self.annotation_counts.any_changes_applied(): return tree_with_changes else: return tree diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index e337f41a..2f97e37c 100644 --- 
a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -921,6 +921,46 @@ class TestApplyAnnotationsVisitor(CodemodTest): overwrite_existing_annotations=True, ) + @data_provider( + { + "basic_example_using_future_annotations": ( + """ + def f() -> bool: ... + """, + """ + def f(): + return True + """, + """ + from __future__ import annotations + + def f() -> bool: + return True + """, + ), + "no_use_future_if_no_changes": ( + """ + def f() -> bool: ... + """, + """ + def f() -> bool: + return True + """, + """ + def f() -> bool: + return True + """, + ), + } + ) + def test_use_future_annotations(self, stub: str, before: str, after: str) -> None: + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, + use_future_annotations=True, + ) + @data_provider( { "test_counting_parameters_and_returns": ( @@ -1028,7 +1068,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): before: str, after: str, annotation_counts: AnnotationCounts, - any_changes: False, + any_changes_applied: False, ): stub = self.make_fixture_data(stub) before = self.make_fixture_data(before) @@ -1044,4 +1084,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): self.assertEqual(after, output_code) self.assertEqual(str(annotation_counts), str(visitor.annotation_counts)) - self.assertEqual(any_changes, visitor.annotation_counts.any_changes()) + self.assertEqual( + any_changes_applied, visitor.annotation_counts.any_changes_applied() + ) From 3ccfc4adc82b8354c1c998b0abd1ded3d690a1e8 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Tue, 2 Nov 2021 15:06:45 -0400 Subject: [PATCH 145/632] Fix typing errors on ATAV (#542) --- .../visitors/_apply_type_annotations.py | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index cc53d656..da1679ca 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -76,7 +76,7 @@ class TypeCollector(cst.CSTVisitor): value = base.value if isinstance(value, NAME_OR_ATTRIBUTE): new_value = self._handle_NameOrAttribute(value) - elif isinstance(base.value, cst.Subscript): + elif isinstance(value, cst.Subscript): new_value = self._handle_Subscript(value) else: start = self.get_metadata(PositionProvider, node).start @@ -121,21 +121,23 @@ class TypeCollector(cst.CSTVisitor): self.qualifier.pop() def _get_unique_qualified_name(self, node: cst.CSTNode) -> str: + name = None names = [q.name for q in self.get_metadata(QualifiedNameProvider, node)] if len(names) == 0: # we hit this branch if the stub is directly using a fully # qualified name, which is not technically valid python but is # convenient to allow. - return get_full_name_for_node(node) - elif len(names) == 1: - return names[0] - else: + name = get_full_name_for_node(node) + elif len(names) == 1 and isinstance(names[0], str): + name = names[0] + if name is None: start = self.get_metadata(PositionProvider, node).start raise ValueError( "Could not resolve a unique qualified name for type " + f"{get_full_name_for_node(node)} at {start.line}:{start.column}. " + f"Candidate names were: {names!r}" ) + return name def _get_qualified_name_and_dequalified_node( self, @@ -161,6 +163,7 @@ class TypeCollector(cst.CSTVisitor): else: AddImportsVisitor.add_needed_import(self.context, module, target) return False + return False # Handler functions. 
# @@ -183,7 +186,7 @@ class TypeCollector(cst.CSTVisitor): else: return dequalified_node - def _handle_Index(self, slice: cst.Index, node: cst.Subscript) -> cst.Subscript: + def _handle_Index(self, slice: cst.Index, node: cst.Subscript) -> cst.Index: value = slice.value if isinstance(value, cst.Subscript): return slice.with_changes(value=self._handle_Subscript(value)) @@ -233,8 +236,10 @@ class TypeCollector(cst.CSTVisitor): return annotation elif isinstance(node, cst.Subscript): return cst.Annotation(annotation=self._handle_Subscript(node)) - else: + elif isinstance(node, NAME_OR_ATTRIBUTE): return cst.Annotation(annotation=self._handle_NameOrAttribute(node)) + else: + raise ValueError(f"Unexpected annotation node: {node}") def _handle_Parameters(self, parameters: cst.Parameters) -> cst.Parameters: def update_annotations(parameters: Sequence[cst.Param]) -> List[cst.Param]: @@ -266,7 +271,7 @@ class AnnotationCounts: return_annotations: int = 0 classes_added: int = 0 - def any_changes_applied(self): + def any_changes_applied(self) -> bool: return ( self.global_annotations + self.attribute_annotations @@ -601,12 +606,13 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): function_annotation = self.annotations.function_annotations[key] # Only add new annotation if explicitly told to overwrite existing # annotations or if one doesn't already exist. - set_return_annotation = not updated_node.returns or ( - self.overwrite_existing_annotations and function_annotation.returns + set_return_annotation = ( + self.overwrite_existing_annotations or updated_node.returns is None ) - if set_return_annotation: + if set_return_annotation and function_annotation.returns is not None: updated_node = self._apply_annotation_to_return( - function_def=updated_node, annotation=function_annotation.returns + function_def=updated_node, + annotation=function_annotation.returns, ) # Don't override default values when annotating functions new_parameters = self._update_parameters(function_annotation, updated_node) From 3dbcf5fed75fc97fd089e42c676e54a8e15124b3 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Mon, 8 Nov 2021 15:34:24 -0500 Subject: [PATCH 146/632] Don't gather metadata if the wrapper already contains it (#545) Make sure that the MetadataWrapper to resolves the requested providers vs the existing metadata results and prevent a single provider from being invoked multiple times. 
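The tests added below check this with mocks; in caller terms the guarantee is that resolving the same provider twice on one `MetadataWrapper` only runs it once. A small sketch under that assumption, using an invented counting provider:

```
import libcst as cst
from libcst.metadata import MetadataWrapper, VisitorMetadataProvider

class PassCounter(VisitorMetadataProvider[bool]):
    runs = 0  # class-level counter, purely to observe how often the provider runs

    def visit_Pass(self, node: cst.Pass) -> None:
        type(self).runs += 1

wrapper = MetadataWrapper(cst.parse_module("pass"))
wrapper.resolve(PassCounter)
wrapper.resolve(PassCounter)  # second call reuses the cached metadata
assert PassCounter.runs == 1
```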
--- .../metadata/tests/test_metadata_wrapper.py | 56 ++++++++++++++++++- libcst/metadata/wrapper.py | 5 +- 2 files changed, 57 insertions(+), 4 deletions(-) diff --git a/libcst/metadata/tests/test_metadata_wrapper.py b/libcst/metadata/tests/test_metadata_wrapper.py index 731a910f..36fe7d5b 100644 --- a/libcst/metadata/tests/test_metadata_wrapper.py +++ b/libcst/metadata/tests/test_metadata_wrapper.py @@ -5,9 +5,14 @@ from typing import Optional +from unittest.mock import Mock import libcst as cst -from libcst.metadata import BatchableMetadataProvider, MetadataWrapper +from libcst.metadata import ( + BatchableMetadataProvider, + MetadataWrapper, + VisitorMetadataProvider, +) from libcst.testing.utils import UnitTest @@ -73,3 +78,52 @@ class MetadataWrapperTest(UnitTest): self.assertEqual( mw.resolve(SimpleCacheMetadataProvider)[pass_node], cached_data ) + + def test_resolve_provider_twice(self) -> None: + """ + Tests that resolving the same provider twice is a no-op + """ + mock = Mock() + + class ProviderA(VisitorMetadataProvider[bool]): + def visit_Pass(self, node: cst.Pass) -> None: + mock.visited_a() + + module = cst.parse_module("pass") + wrapper = MetadataWrapper(module) + + wrapper.resolve(ProviderA) + mock.visited_a.assert_called_once() + + wrapper.resolve(ProviderA) + mock.visited_a.assert_called_once() + + def test_resolve_dependent_provider_twice(self) -> None: + """ + Tests that resolving the same provider twice is a no-op + """ + mock = Mock() + + class ProviderA(VisitorMetadataProvider[bool]): + def visit_Pass(self, node: cst.Pass) -> None: + mock.visited_a() + + class ProviderB(VisitorMetadataProvider[bool]): + METADATA_DEPENDENCIES = (ProviderA,) + + def visit_Pass(self, node: cst.Pass) -> None: + mock.visited_b() + + module = cst.parse_module("pass") + wrapper = MetadataWrapper(module) + + wrapper.resolve(ProviderA) + mock.visited_a.assert_called_once() + + wrapper.resolve(ProviderB) + mock.visited_a.assert_called_once() + mock.visited_b.assert_called_once() + + wrapper.resolve(ProviderA) + mock.visited_a.assert_called_once() + mock.visited_b.assert_called_once() diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index 9ff9b4a0..cbba22f5 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -73,10 +73,9 @@ def _resolve_impl( Updates the _metadata map on wrapper with metadata from the given providers as well as their dependencies. """ - providers = set(providers) - set(wrapper._metadata.keys()) - remaining = _gather_providers(providers, set()) + completed = set(wrapper._metadata.keys()) + remaining = _gather_providers(set(providers), set()) - completed - completed = set() while len(remaining) > 0: batchable = set() From 182586cf5b55073af138b53077631201bd41ac18 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 10 Nov 2021 02:33:57 -0800 Subject: [PATCH 147/632] Support relative imports in ATAV qualifier handling (#538) Based on diff review of https://github.com/Instagram/LibCST/pull/536, I investigated relatvie import handling and realized that with minor changes we can now handle them correctly. Relative imports aren't likely in code coming from an automated tool, but they could happen in hand-written stubs if anyone tries to use this codemod tool to merge stubs with code. Added a new test: ``` > python -m unittest libcst.codemod.visitors.tests.test_apply_type_annotations ............................................. 
---------------------------------------------------------------------- Ran 45 tests in 2.195s OK ``` --- .../visitors/_apply_type_annotations.py | 19 +++++++++++--- .../tests/test_apply_type_annotations.py | 26 +++++++++++++++++++ 2 files changed, 42 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index da1679ca..db9df20c 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -147,15 +147,28 @@ class TypeCollector(cst.CSTVisitor): dequalified_node = node.attr if isinstance(node, cst.Attribute) else node return qualified_name, dequalified_node + def _module_and_target(self, qualified_name: str) -> Tuple[str, str]: + relative_prefix = "" + while qualified_name.startswith("."): + relative_prefix += "." + qualified_name = qualified_name[1:] + split = qualified_name.rsplit(".", 1) + if len(split) == 1: + qualifier, target = "", split[0] + else: + qualifier, target = split + return (relative_prefix + qualifier, target) + def _handle_qualification_and_should_qualify(self, qualified_name: str) -> bool: """ Basd on a qualified name and the existing module imports, record that we need to add an import if necessary and return whether or not we should use the qualified name due to a preexisting import. """ - split_name = qualified_name.split(".") - if len(split_name) > 1 and qualified_name not in self.existing_imports: - module, target = ".".join(split_name[:-1]), split_name[-1] + module, target = self._module_and_target(qualified_name) + if module in ("", "builtins"): + return False + elif qualified_name not in self.existing_imports: if module == "builtins": return False elif module in self.existing_imports: diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 2f97e37c..29652aa9 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -123,6 +123,32 @@ class TestApplyAnnotationsVisitor(CodemodTest): FOO: Union[Example, int] = bar() """, ), + "with_relative_imports": ( + """ + from .relative0 import T0 + from ..relative1 import T1 + from . 
import relative2 + + x0: typing.Optional[T0] + x1: typing.Optional[T1] + x2: typing.Optional[relative2.T2] + """, + """ + x0 = None + x1 = None + x2 = None + """, + """ + from ..relative1 import T1 + from .relative0 import T0 + from .relative2 import T2 + from typing import Optional + + x0: Optional[T0] = None + x1: Optional[T1] = None + x2: Optional[T2] = None + """, + ), } ) def test_annotate_globals(self, stub: str, before: str, after: str) -> None: From 70ff0f2182a7edc4990e6f85673324069e5fc785 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 10 Nov 2021 12:15:18 -0500 Subject: [PATCH 148/632] Upgrade black to fix errors in 3.9.8 (#547) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index bc7688bf..5bf3217b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -black==20.8b1 +black==21.10b0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8 From 3e798726ec0a545cbf644dbc67c6bc3529abf5d3 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 17 Nov 2021 14:37:53 -0500 Subject: [PATCH 149/632] Pass absolute path to codemod test (#549) --- libcst/codemod/tests/test_codemod_cli.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 74f1c174..7daa7056 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -7,6 +7,7 @@ import subprocess import sys +from pathlib import Path from libcst.testing.utils import UnitTest @@ -20,7 +21,7 @@ class TestCodemodCLI(UnitTest): "libcst.tool", "codemod", "remove_unused_imports.RemoveUnusedImportsCommand", - "libcst/codemod/tests/codemod_formatter_error_input.py.txt", + str(Path(__file__).parent / "codemod_formatter_error_input.py.txt"), ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, From 7db6ec538417184c855413bb0a39e6ccedc887e8 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 17 Nov 2021 15:08:52 -0500 Subject: [PATCH 150/632] Swallow parsing errors in string annotations. (#548) * Swallow parsing errors in string annotations. This is the same behavior as cPython. 
I've also rewritten the test that was relying on this exception to check where type parsing was happening * Fix pyre error --- libcst/metadata/scope_provider.py | 9 +- libcst/metadata/tests/test_scope_provider.py | 93 +++++++++----------- 2 files changed, 50 insertions(+), 52 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 821855c9..33b218c2 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -898,8 +898,13 @@ class ScopeVisitor(cst.CSTVisitor): top_level_annotation = self.__last_string_annotation is None if top_level_annotation: self.__last_string_annotation = node - mod = cst.parse_module(value) - mod.visit(self) + try: + mod = cst.parse_module(value) + mod.visit(self) + except cst.ParserSyntaxError: + # swallow string annotation parsing errors + # this is the same behavior as cPython + pass if top_level_annotation: self.__last_string_annotation = None return True diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 8a1bf4b3..b5c8c6da 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -7,6 +7,7 @@ import sys from textwrap import dedent from typing import Mapping, Tuple, cast +from unittest import mock import libcst as cst from libcst import ensure_type @@ -1711,61 +1712,53 @@ class ScopeProviderTest(UnitTest): ) def test_cast(self) -> None: - with self.assertRaises(cst.ParserSyntaxError): - m, scopes = get_scope_metadata_provider( - """ - from typing import TypeVar - TypeVar("Name", "3rr0r") - """ - ) + def assert_parsed(code, *calls): + parse = cst.parse_module + with mock.patch("libcst.parse_module") as parse_mock: + parse_mock.side_effect = parse + get_scope_metadata_provider(dedent(code)) + calls = [mock.call(dedent(code))] + list(calls) + self.assertEqual(parse_mock.call_count, len(calls)) + parse_mock.assert_has_calls(calls) - try: - m, scopes = get_scope_metadata_provider( - """ - from typing import TypeVar - TypeVar("3rr0r", "int") - """ - ) - except cst.ParserSyntaxError: - self.fail( - "First string argument of NewType and TypeVar should not be parsed" - ) + assert_parsed( + """ + from typing import TypeVar + TypeVar("Name", "int") + """, + mock.call("int"), + ) - with self.assertRaises(cst.ParserSyntaxError): - m, scopes = get_scope_metadata_provider( - """ - from typing import Dict - Dict["str", "3rr0r"] - """ - ) + assert_parsed( + """ + from typing import Dict + Dict["str", "int"] + """, + mock.call("str"), + mock.call("int"), + ) - try: - m, scopes = get_scope_metadata_provider( - """ - from typing import Dict, cast - cast(Dict[str, str], {})["3rr0r"] - """ - ) - except cst.ParserSyntaxError: - self.fail("Subscript of function calls should not be parsed") + assert_parsed( + """ + from typing import Dict, cast + cast(Dict[str, str], {})["3rr0r"] + """ + ) - try: - m, scopes = get_scope_metadata_provider( - """ - from typing import cast - cast(str, "3rr0r") - """ - ) - except cst.ParserSyntaxError: - self.fail("String arguments of cast should not be parsed") + assert_parsed( + """ + from typing import cast + cast(str, "foo") + """, + ) - with self.assertRaises(cst.ParserSyntaxError): - m, scopes = get_scope_metadata_provider( - """ - from typing import cast - cast("3rr0r", "") - """ - ) + assert_parsed( + """ + from typing import cast + cast("int", "foo") + """, + mock.call("int"), + ) def test_builtin_scope(self) -> None: m, scopes = get_scope_metadata_provider( From 
56386d7add0f3c78d1b36efee12e9d85ada2e2de Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 17 Nov 2021 16:12:02 -0500 Subject: [PATCH 151/632] Stop parsing string annotations when no longer in a typing call (#546) * Fix ScopeProvider when string type annotation is unparsable * Handle nested function calls w/in type declarations * Edit stack in place * Add unparsed test to test_cast --- libcst/metadata/scope_provider.py | 34 +++++++++++--------- libcst/metadata/tests/test_scope_provider.py | 26 ++++++++++++++- 2 files changed, 44 insertions(+), 16 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 33b218c2..f69974a7 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -789,10 +789,8 @@ class ScopeVisitor(cst.CSTVisitor): self.scope: Scope = GlobalScope() self.__deferred_accesses: List[DeferredAccess] = [] self.__top_level_attribute_stack: List[Optional[cst.Attribute]] = [None] - self.__in_annotation: Set[ - Union[cst.Call, cst.Annotation, cst.Subscript] - ] = set() - self.__in_type_hint: Set[Union[cst.Call, cst.Annotation, cst.Subscript]] = set() + self.__in_annotation_stack: List[bool] = [False] + self.__in_type_hint_stack: List[bool] = [False] self.__in_ignored_subscript: Set[cst.Subscript] = set() self.__last_string_annotation: Optional[cst.BaseString] = None self.__ignore_annotation: int = 0 @@ -851,19 +849,21 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Call(self, node: cst.Call) -> Optional[bool]: self.__top_level_attribute_stack.append(None) + self.__in_type_hint_stack.append(False) + self.__in_annotation_stack.append(False) qnames = {qn.name for qn in self.scope.get_qualified_names_for(node)} if "typing.NewType" in qnames or "typing.TypeVar" in qnames: node.func.visit(self) - self.__in_type_hint.add(node) + self.__in_type_hint_stack[-1] = True for arg in node.args[1:]: arg.visit(self) return False if "typing.cast" in qnames: node.func.visit(self) if len(node.args) > 0: - self.__in_type_hint.add(node) + self.__in_type_hint_stack.append(True) node.args[0].visit(self) - self.__in_type_hint.discard(node) + self.__in_type_hint_stack.pop() for arg in node.args[1:]: arg.visit(self) return False @@ -871,13 +871,14 @@ class ScopeVisitor(cst.CSTVisitor): def leave_Call(self, original_node: cst.Call) -> None: self.__top_level_attribute_stack.pop() - self.__in_type_hint.discard(original_node) + self.__in_type_hint_stack.pop() + self.__in_annotation_stack.pop() def visit_Annotation(self, node: cst.Annotation) -> Optional[bool]: - self.__in_annotation.add(node) + self.__in_annotation_stack.append(True) def leave_Annotation(self, original_node: cst.Annotation) -> None: - self.__in_annotation.discard(original_node) + self.__in_annotation_stack.pop() def visit_SimpleString(self, node: cst.SimpleString) -> Optional[bool]: self._handle_string_annotation(node) @@ -891,7 +892,7 @@ class ScopeVisitor(cst.CSTVisitor): ) -> bool: """Returns whether it successfully handled the string annotation""" if ( - self.__in_type_hint or self.__in_annotation + self.__in_type_hint_stack[-1] or self.__in_annotation_stack[-1] ) and not self.__in_ignored_subscript: value = node.evaluated_value if value: @@ -911,16 +912,19 @@ class ScopeVisitor(cst.CSTVisitor): return False def visit_Subscript(self, node: cst.Subscript) -> Optional[bool]: + in_type_hint = False if isinstance(node.value, cst.Name): qnames = {qn.name for qn in self.scope.get_qualified_names_for(node.value)} if any(qn.startswith(("typing.", "typing_extensions.")) 
for qn in qnames): - self.__in_type_hint.add(node) + in_type_hint = True if "typing.Literal" in qnames or "typing_extensions.Literal" in qnames: self.__in_ignored_subscript.add(node) + + self.__in_type_hint_stack.append(in_type_hint) return True def leave_Subscript(self, original_node: cst.Subscript) -> None: - self.__in_type_hint.discard(original_node) + self.__in_type_hint_stack.pop() self.__in_ignored_subscript.discard(original_node) def visit_Name(self, node: cst.Name) -> Optional[bool]: @@ -933,9 +937,9 @@ class ScopeVisitor(cst.CSTVisitor): node, self.scope, is_annotation=bool( - self.__in_annotation and not self.__ignore_annotation + self.__in_annotation_stack[-1] and not self.__ignore_annotation ), - is_type_hint=bool(self.__in_type_hint), + is_type_hint=bool(self.__in_type_hint_stack[-1]), ) self.__deferred_accesses.append( DeferredAccess( diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index b5c8c6da..56dc0853 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1191,7 +1191,7 @@ class ScopeProviderTest(UnitTest): def test_insane_annotation_access(self) -> None: m, scopes = get_scope_metadata_provider( r""" - from typing import TypeVar + from typing import TypeVar, Optional from a import G TypeVar("G2", bound="Optional[\"G\"]") """ @@ -1760,6 +1760,30 @@ class ScopeProviderTest(UnitTest): mock.call("int"), ) + assert_parsed( + """ + from typing import TypeVar + TypeVar("Name", func("int")) + """, + ) + + assert_parsed( + """ + from typing import Literal + Literal[\"G\"] + """, + ) + + assert_parsed( + r""" + from typing import TypeVar, Optional + from a import G + TypeVar("G2", bound="Optional[\"G\"]") + """, + mock.call('Optional["G"]'), + mock.call("G"), + ) + def test_builtin_scope(self) -> None: m, scopes = get_scope_metadata_provider( """ From 5c05001c2d7f7565cbdd9aa13e90f74749acfb79 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 18 Nov 2021 04:03:27 -0500 Subject: [PATCH 152/632] Publish test packages (#550) --- .github/workflows/build.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 71b9f405..4990031c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -188,3 +188,10 @@ jobs: --sdist --wheel --outdir dist/ + - if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + name: Publish distribution 📦 to Test PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ From 2cd43c4b7817a9fd7f92f8e5e6797be558802d01 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 18 Nov 2021 15:00:06 +0000 Subject: [PATCH 153/632] disable setuptools local scheme when env var is set (#551) --- .github/workflows/build.yml | 4 ++++ setup.py | 12 +++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4990031c..4ecbea3c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -181,6 +181,10 @@ jobs: - name: Validate Dependencies if: steps.cache.outputs.cache-hit != 'true' run: exit 1 + - name: Disable scmtools local scheme + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + run: >- + echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build a binary wheel and a source tarball run: >- python -m diff --git 
a/setup.py b/setup.py index f083f31b..593acd7b 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. -from os import path +from os import path, environ import setuptools @@ -13,9 +13,19 @@ this_directory: str = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.rst"), encoding="utf-8") as f: long_description: str = f.read() + +def no_local_scheme(version: str) -> str: + return "" + + setuptools.setup( use_scm_version={ "write_to": "libcst/_version.py", + **( + {"local_scheme": no_local_scheme} + if "LIBCST_NO_LOCAL_SCHEME" in environ + else {} + ), }, name="libcst", description="A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7 and 3.8 programs.", From 463f15e8050a31a1ca1b36917de7f83e79a8ad8a Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 18 Nov 2021 17:08:19 -0500 Subject: [PATCH 154/632] Test overwriting imports w/ global and nonlocal stmts (#553) --- libcst/metadata/tests/test_scope_provider.py | 130 ++++++++++++------- 1 file changed, 81 insertions(+), 49 deletions(-) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 56dc0853..9d5e3892 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -478,31 +478,39 @@ class ScopeProviderTest(UnitTest): self.assertIs(scopes[inner_for_in.target], scope_of_list_comp) def test_global_scope_overwrites(self) -> None: - m, scopes = get_scope_metadata_provider( + codes = ( """ class Cls: def f(): global var var = ... + """, """ + class Cls: + def f(): + global var + import f as var + """, ) - scope_of_module = scopes[m] - self.assertIsInstance(scope_of_module, GlobalScope) - self.assertTrue("var" in scope_of_module) + for code in codes: + m, scopes = get_scope_metadata_provider(code) + scope_of_module = scopes[m] + self.assertIsInstance(scope_of_module, GlobalScope) + self.assertTrue("var" in scope_of_module) - cls = ensure_type(m.body[0], cst.ClassDef) - scope_of_cls = scopes[cls.body.body[0]] - self.assertIsInstance(scope_of_cls, ClassScope) - self.assertTrue("var" in scope_of_cls) + cls = ensure_type(m.body[0], cst.ClassDef) + scope_of_cls = scopes[cls.body.body[0]] + self.assertIsInstance(scope_of_cls, ClassScope) + self.assertTrue("var" in scope_of_cls) - f = ensure_type(cls.body.body[0], cst.FunctionDef) - scope_of_f = scopes[f.body.body[0]] - self.assertIsInstance(scope_of_f, FunctionScope) - self.assertTrue("var" in scope_of_f) - self.assertEqual(scope_of_f["var"], scope_of_module["var"]) + f = ensure_type(cls.body.body[0], cst.FunctionDef) + scope_of_f = scopes[f.body.body[0]] + self.assertIsInstance(scope_of_f, FunctionScope) + self.assertTrue("var" in scope_of_f) + self.assertEqual(scope_of_f["var"], scope_of_module["var"]) def test_nonlocal_scope_overwrites(self) -> None: - m, scopes = get_scope_metadata_provider( + codes = ( """ def outer_f(): var = ... @@ -511,46 +519,70 @@ class ScopeProviderTest(UnitTest): def inner_f(): nonlocal var var = ... + """, """ + def outer_f(): + import f as var + class Cls: + var = ... + def inner_f(): + nonlocal var + var = ... + """, + """ + def outer_f(): + var = ... + class Cls: + var = ... 
+ def inner_f(): + nonlocal var + import f as var + """, ) - scope_of_module = scopes[m] - self.assertIsInstance(scope_of_module, GlobalScope) - self.assertTrue("var" not in scope_of_module) + for code in codes: + m, scopes = get_scope_metadata_provider(code) + scope_of_module = scopes[m] + self.assertIsInstance(scope_of_module, GlobalScope) + self.assertTrue("var" not in scope_of_module) - outer_f = ensure_type(m.body[0], cst.FunctionDef) - outer_f_body_var_assign = ensure_type( - ensure_type(outer_f.body.body[0], cst.SimpleStatementLine).body[0], - cst.Assign, - ) - scope_of_outer_f = scopes[outer_f_body_var_assign] - self.assertIsInstance(scope_of_outer_f, FunctionScope) - self.assertTrue("var" in scope_of_outer_f) - self.assertEqual(len(scope_of_outer_f["var"]), 2) + outer_f = ensure_type(m.body[0], cst.FunctionDef) + outer_f_body_var = ensure_type( + ensure_type(outer_f.body.body[0], cst.SimpleStatementLine).body[0], + cst.CSTNode, + ) + scope_of_outer_f = scopes[outer_f_body_var] + self.assertIsInstance(scope_of_outer_f, FunctionScope) + self.assertTrue("var" in scope_of_outer_f) + self.assertEqual(len(scope_of_outer_f["var"]), 2) - cls = ensure_type(outer_f.body.body[1], cst.ClassDef) - scope_of_cls = scopes[cls.body.body[0]] - self.assertIsInstance(scope_of_cls, ClassScope) - self.assertTrue("var" in scope_of_cls) + cls = ensure_type(outer_f.body.body[1], cst.ClassDef) + scope_of_cls = scopes[cls.body.body[0]] + self.assertIsInstance(scope_of_cls, ClassScope) + self.assertTrue("var" in scope_of_cls) - inner_f = ensure_type(cls.body.body[1], cst.FunctionDef) - inner_f_body_var_assign = ensure_type( - ensure_type(inner_f.body.body[1], cst.SimpleStatementLine).body[0], - cst.Assign, - ) - scope_of_inner_f = scopes[inner_f_body_var_assign] - self.assertIsInstance(scope_of_inner_f, FunctionScope) - self.assertTrue("var" in scope_of_inner_f) - self.assertEqual(len(scope_of_inner_f["var"]), 2) - self.assertEqual( - { - cast(Assignment, assignment).node - for assignment in scope_of_outer_f["var"] - }, - { - outer_f_body_var_assign.targets[0].target, - inner_f_body_var_assign.targets[0].target, - }, - ) + inner_f = ensure_type(cls.body.body[1], cst.FunctionDef) + inner_f_body_var = ensure_type( + ensure_type(inner_f.body.body[1], cst.SimpleStatementLine).body[0], + cst.CSTNode, + ) + scope_of_inner_f = scopes[inner_f_body_var] + self.assertIsInstance(scope_of_inner_f, FunctionScope) + self.assertTrue("var" in scope_of_inner_f) + self.assertEqual(len(scope_of_inner_f["var"]), 2) + self.assertEqual( + { + cast(Assignment, assignment).node + for assignment in scope_of_outer_f["var"] + }, + { + outer_f_body_var.targets[0].target + if isinstance(outer_f_body_var, cst.Assign) + else outer_f_body_var, + inner_f_body_var.targets[0].target + if isinstance(inner_f_body_var, cst.Assign) + else inner_f_body_var, + }, + ) def test_local_scope_shadowing_with_functions(self) -> None: m, scopes = get_scope_metadata_provider( From ae8d0cda2fec69c81866df47d14d810c778fd3fc Mon Sep 17 00:00:00 2001 From: Giorgi Megreli Date: Fri, 19 Nov 2021 11:20:47 +0000 Subject: [PATCH 155/632] Pin accesses to import alias node (#554) * Add ImportAssignment class and record it from Scope * Add overrides for LocalScope and ClassScope * Clean scope_provider code and use ImportAssignment class in `unusued_imports` codemod * Add missing types * Fix fixit errors --- .../visitors/_gather_unused_imports.py | 3 +- libcst/metadata/__init__.py | 2 + libcst/metadata/scope_provider.py | 66 +++++++++++++++---- 
libcst/metadata/tests/test_scope_provider.py | 43 +++++++++--- 4 files changed, 90 insertions(+), 24 deletions(-) diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index 89f37844..2dc439ac 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -134,8 +134,7 @@ class GatherUnusedImportsVisitor(ContextAwareVisitor): for assignment in scope[name_or_alias]: if ( - isinstance(assignment, cst.metadata.Assignment) - and isinstance(assignment.node, (cst.ImportFrom, cst.Import)) + isinstance(assignment, cst.metadata.ImportAssignment) and len(assignment.references) > 0 ): return True diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index 55a48cb5..0603f098 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -41,6 +41,7 @@ from libcst.metadata.scope_provider import ( ComprehensionScope, FunctionScope, GlobalScope, + ImportAssignment, QualifiedName, QualifiedNameSource, Scope, @@ -63,6 +64,7 @@ __all__ = [ "BaseAssignment", "Assignment", "BuiltinAssignment", + "ImportAssignment", "BuiltinScope", "Access", "Scope", diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index f69974a7..e3590f0c 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -208,6 +208,23 @@ class BuiltinAssignment(BaseAssignment): pass +class ImportAssignment(Assignment): + """An assignment records the import node and it's alias""" + + as_name: cst.CSTNode + + def __init__( + self, + name: str, + scope: "Scope", + node: cst.CSTNode, + index: int, + as_name: cst.CSTNode, + ): + super().__init__(name, scope, node, index) + self.as_name = as_name + + class Assignments: """A container to provide all assignments in a scope.""" @@ -401,10 +418,23 @@ class Scope(abc.ABC): self._assignment_count = 0 def record_assignment(self, name: str, node: cst.CSTNode) -> None: - self._assignments[name].add( + self._add_assignment( Assignment(name=name, scope=self, node=node, index=self._assignment_count) ) + def record_import_assignment( + self, name: str, node: cst.CSTNode, as_name: cst.CSTNode + ) -> None: + self._add_assignment( + ImportAssignment( + name=name, + scope=self, + node=node, + as_name=as_name, + index=self._assignment_count, + ) + ) + def record_access(self, name: str, access: Access) -> None: self._accesses[name].add(access) @@ -416,9 +446,13 @@ class Scope(abc.ABC): """Overridden by ClassScope to hide it's assignments from child scopes.""" return name in self - def _record_assignment_as_parent(self, name: str, node: cst.CSTNode) -> None: + def _add_assignment(self, assignment: "BaseAssignment") -> None: + assignment.scope = self + self._assignments[assignment.name].add(assignment) + + def _add_assignment_as_parent(self, assignment: "BaseAssignment") -> None: """Overridden by ClassScope to forward 'nonlocal' assignments from child scopes.""" - self.record_assignment(name, node) + self._add_assignment(assignment) @abc.abstractmethod def __contains__(self, name: str) -> bool: @@ -575,15 +609,15 @@ class BuiltinScope(Scope): return self._assignments[name] return set() - def record_assignment(self, name: str, node: cst.CSTNode) -> None: - raise NotImplementedError("assignments in builtin scope are not allowed") - def record_global_overwrite(self, name: str) -> None: raise NotImplementedError("global overwrite in builtin scope are not allowed") def record_nonlocal_overwrite(self, name: str) -> 
None: raise NotImplementedError("declarations in builtin scope are not allowed") + def _add_assignment(self, assignment: "BaseAssignment") -> None: + raise NotImplementedError("assignments in builtin scope are not allowed") + class GlobalScope(Scope): """ @@ -634,11 +668,13 @@ class LocalScope(Scope, abc.ABC): def record_nonlocal_overwrite(self, name: str) -> None: self._scope_overwrites[name] = self.parent - def record_assignment(self, name: str, node: cst.CSTNode) -> None: - if name in self._scope_overwrites: - self._scope_overwrites[name]._record_assignment_as_parent(name, node) + def _add_assignment(self, assignment: "BaseAssignment") -> None: + if assignment.name in self._scope_overwrites: + self._scope_overwrites[assignment.name]._add_assignment_as_parent( + assignment + ) else: - super().record_assignment(name, node) + super()._add_assignment(assignment) def __contains__(self, name: str) -> bool: if name in self._scope_overwrites: @@ -671,7 +707,7 @@ class ClassScope(LocalScope): When a class is defined, it creates a ClassScope. """ - def _record_assignment_as_parent(self, name: str, node: cst.CSTNode) -> None: + def _add_assignment_as_parent(self, assignment: "BaseAssignment") -> None: """ Forward the assignment to parent. @@ -686,7 +722,7 @@ class ClassScope(LocalScope): # hidden from its children. """ - self.parent._record_assignment_as_parent(name, node) + self.parent._add_assignment_as_parent(assignment) def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: """ @@ -826,11 +862,15 @@ class ScopeVisitor(cst.CSTVisitor): asname = name.asname if asname is not None: name_values = _gen_dotted_names(cst.ensure_type(asname.name, cst.Name)) + import_node_asname = asname.name else: name_values = _gen_dotted_names(name.name) + import_node_asname = name.name for name_value, _ in name_values: - self.scope.record_assignment(name_value, node) + self.scope.record_import_assignment( + name_value, node, import_node_asname + ) return False def visit_Import(self, node: cst.Import) -> Optional[bool]: diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 9d5e3892..85d9266b 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -14,6 +14,7 @@ from libcst import ensure_type from libcst.metadata import MetadataWrapper from libcst.metadata.scope_provider import ( Assignment, + ImportAssignment, BuiltinAssignment, BuiltinScope, ClassScope, @@ -192,17 +193,24 @@ class ScopeProviderTest(UnitTest): len(scope_of_module[in_scope]), 1, f"{in_scope} should be in scope." 
) - assignment = cast(Assignment, list(scope_of_module[in_scope])[0]) + assignment = cast(ImportAssignment, list(scope_of_module[in_scope])[0]) self.assertEqual( assignment.name, in_scope, - f"Assignment name {assignment.name} should equal to {in_scope}.", + f"ImportAssignment name {assignment.name} should equal to {in_scope}.", ) import_node = ensure_type(m.body[idx], cst.SimpleStatementLine).body[0] self.assertEqual( assignment.node, import_node, - f"The node of Assignment {assignment.node} should equal to {import_node}", + f"The node of ImportAssignment {assignment.node} should equal to {import_node}", + ) + alias = import_node.names[0] + as_name = alias.asname.name if alias.asname else alias.name + self.assertEqual( + assignment.as_name, + as_name, + f"The alias name of ImportAssignment {assignment.as_name} should equal to {as_name}", ) def test_dotted_import_access(self) -> None: @@ -221,7 +229,7 @@ class ScopeProviderTest(UnitTest): self.assertTrue("a" in scope_of_module) self.assertEqual(scope_of_module.accesses["a"], set()) - a_b_c_assignment = cast(Assignment, list(scope_of_module["a.b.c"])[0]) + a_b_c_assignment = cast(ImportAssignment, list(scope_of_module["a.b.c"])[0]) a_b_c_access = list(a_b_c_assignment.references)[0] self.assertEqual(scope_of_module.accesses["a.b.c"], {a_b_c_access}) self.assertEqual(a_b_c_access.node, call.func) @@ -261,7 +269,9 @@ class ScopeProviderTest(UnitTest): self.assertTrue("os.path" in scope_of_module) self.assertTrue("os" in scope_of_module) - os_path_join_assignment = cast(Assignment, list(scope_of_module["os.path"])[0]) + os_path_join_assignment = cast( + ImportAssignment, list(scope_of_module["os.path"])[0] + ) os_path_join_assignment_references = list(os_path_join_assignment.references) self.assertNotEqual(len(os_path_join_assignment_references), 0) os_path_join_access = os_path_join_assignment_references[0] @@ -289,21 +299,36 @@ class ScopeProviderTest(UnitTest): for alias in import_aliases: self.assertEqual(scopes[alias], scope_of_module) - for idx, in_scope in [(0, "a"), (0, "b_renamed"), (1, "c"), (2, "d")]: + for idx, in_scope, imported_object_idx in [ + (0, "a", 0), + (0, "b_renamed", 1), + (1, "c", 0), + (2, "d", 0), + ]: self.assertEqual( len(scope_of_module[in_scope]), 1, f"{in_scope} should be in scope." 
) - import_assignment = cast(Assignment, list(scope_of_module[in_scope])[0]) + import_assignment = cast( + ImportAssignment, list(scope_of_module[in_scope])[0] + ) self.assertEqual( import_assignment.name, in_scope, - f"The name of Assignment {import_assignment.name} should equal to {in_scope}.", + f"The name of ImportAssignment {import_assignment.name} should equal to {in_scope}.", ) import_node = ensure_type(m.body[idx], cst.SimpleStatementLine).body[0] self.assertEqual( import_assignment.node, import_node, - f"The node of Assignment {import_assignment.node} should equal to {import_node}", + f"The node of ImportAssignment {import_assignment.node} should equal to {import_node}", + ) + + alias = import_node.names[imported_object_idx] + as_name = alias.asname.name if alias.asname else alias.name + self.assertEqual( + import_assignment.as_name, + as_name, + f"The alias name of ImportAssignment {import_assignment.as_name} should equal to {as_name}", ) for not_in_scope in ["foo", "bar", "foo.bar", "b"]: From 9732f5ec829bf9f1a9b7a92160ab7f76d0e066dc Mon Sep 17 00:00:00 2001 From: Giorgi Megreli Date: Fri, 19 Nov 2021 12:21:06 +0000 Subject: [PATCH 156/632] Correct and simplify logic of recording assignments (#556) --- libcst/metadata/scope_provider.py | 44 +++++++++++++++---------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index e3590f0c..ae829bc2 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -418,23 +418,33 @@ class Scope(abc.ABC): self._assignment_count = 0 def record_assignment(self, name: str, node: cst.CSTNode) -> None: - self._add_assignment( - Assignment(name=name, scope=self, node=node, index=self._assignment_count) + target = self._find_assignment_target(name) + target._assignments[name].add( + Assignment( + name=name, scope=target, node=node, index=target._assignment_count + ) ) def record_import_assignment( self, name: str, node: cst.CSTNode, as_name: cst.CSTNode ) -> None: - self._add_assignment( + target = self._find_assignment_target(name) + target._assignments[name].add( ImportAssignment( name=name, - scope=self, + scope=target, node=node, as_name=as_name, - index=self._assignment_count, + index=target._assignment_count, ) ) + def _find_assignment_target(self, name: str) -> "Scope": + return self + + def _find_assignment_target_parent(self, name: str) -> "Scope": + return self + def record_access(self, name: str, access: Access) -> None: self._accesses[name].add(access) @@ -446,14 +456,6 @@ class Scope(abc.ABC): """Overridden by ClassScope to hide it's assignments from child scopes.""" return name in self - def _add_assignment(self, assignment: "BaseAssignment") -> None: - assignment.scope = self - self._assignments[assignment.name].add(assignment) - - def _add_assignment_as_parent(self, assignment: "BaseAssignment") -> None: - """Overridden by ClassScope to forward 'nonlocal' assignments from child scopes.""" - self._add_assignment(assignment) - @abc.abstractmethod def __contains__(self, name: str) -> bool: """Check if the name str exist in current scope by ``name in scope``.""" @@ -615,7 +617,7 @@ class BuiltinScope(Scope): def record_nonlocal_overwrite(self, name: str) -> None: raise NotImplementedError("declarations in builtin scope are not allowed") - def _add_assignment(self, assignment: "BaseAssignment") -> None: + def _find_assignment_target(self, name: str) -> "Scope": raise NotImplementedError("assignments in builtin scope are not 
allowed") @@ -668,13 +670,11 @@ class LocalScope(Scope, abc.ABC): def record_nonlocal_overwrite(self, name: str) -> None: self._scope_overwrites[name] = self.parent - def _add_assignment(self, assignment: "BaseAssignment") -> None: - if assignment.name in self._scope_overwrites: - self._scope_overwrites[assignment.name]._add_assignment_as_parent( - assignment - ) + def _find_assignment_target(self, name: str) -> "Scope": + if name in self._scope_overwrites: + return self._scope_overwrites[name]._find_assignment_target_parent(name) else: - super()._add_assignment(assignment) + return super()._find_assignment_target(name) def __contains__(self, name: str) -> bool: if name in self._scope_overwrites: @@ -707,7 +707,7 @@ class ClassScope(LocalScope): When a class is defined, it creates a ClassScope. """ - def _add_assignment_as_parent(self, assignment: "BaseAssignment") -> None: + def _find_assignment_target_parent(self, name: str) -> "Scope": """ Forward the assignment to parent. @@ -722,7 +722,7 @@ class ClassScope(LocalScope): # hidden from its children. """ - self.parent._add_assignment_as_parent(assignment) + return self.parent._find_assignment_target_parent(name) def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: """ From c48cc2101a8254742babd777eec9e5f321cb6d67 Mon Sep 17 00:00:00 2001 From: Giorgi Megreli Date: Fri, 19 Nov 2021 14:58:50 +0000 Subject: [PATCH 157/632] Move find_qualified_names_for in the Assignment class. (#557) Move _NameUtil.find_qualified_name_for ... method inside Assignment classes. --- libcst/metadata/scope_provider.py | 216 ++++++++++++++---------------- 1 file changed, 100 insertions(+), 116 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index ae829bc2..9e6556ce 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -128,6 +128,23 @@ class Access: self.__assignments |= previous_assignments +class QualifiedNameSource(Enum): + IMPORT = auto() + BUILTIN = auto() + LOCAL = auto() + + +@add_slots +@dataclass(frozen=True) +class QualifiedName: + #: Qualified name, e.g. ``a.b.c`` or ``fn..var``. + name: str + + #: Source of the name, either :attr:`QualifiedNameSource.IMPORT`, :attr:`QualifiedNameSource.BUILTIN` + #: or :attr:`QualifiedNameSource.LOCAL`. + source: QualifiedNameSource + + class BaseAssignment(abc.ABC): """Abstract base class of :class:`Assignment` and :class:`BuitinAssignment`.""" @@ -175,6 +192,10 @@ class BaseAssignment(abc.ABC): """Return an integer that represents the order of assignments in `scope`""" return -1 + @abc.abstractmethod + def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: + ... 
+ class Assignment(BaseAssignment): """An assignment records the name, CSTNode and its accesses.""" @@ -195,6 +216,26 @@ class Assignment(BaseAssignment): def _index(self) -> int: return self.__index + def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: + scope = self.scope + name_prefixes = [] + while scope: + if isinstance(scope, ClassScope): + name_prefixes.append(scope.name) + elif isinstance(scope, FunctionScope): + name_prefixes.append(f"{scope.name}.") + elif isinstance(scope, ComprehensionScope): + name_prefixes.append("") + elif not isinstance(scope, (GlobalScope, BuiltinScope)): + raise Exception(f"Unexpected Scope: {scope}") + + scope = scope.parent if scope.parent != scope else None + + parts = [*reversed(name_prefixes)] + if full_name: + parts.append(full_name) + return {QualifiedName(".".join(parts), QualifiedNameSource.LOCAL)} + # even though we don't override the constructor. class BuiltinAssignment(BaseAssignment): @@ -205,7 +246,8 @@ class BuiltinAssignment(BaseAssignment): `types `_. """ - pass + def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: + return {QualifiedName(f"builtins.{self.name}", QualifiedNameSource.BUILTIN)} class ImportAssignment(Assignment): @@ -224,6 +266,55 @@ class ImportAssignment(Assignment): super().__init__(name, scope, node, index) self.as_name = as_name + def get_module_name_for_import(self) -> str: + module = "" + if isinstance(self.node, cst.ImportFrom): + module_attr = self.node.module + relative = self.node.relative + if module_attr: + module = get_full_name_for_node(module_attr) or "" + if relative: + module = "." * len(relative) + module + return module + + def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: + module = self.get_module_name_for_import() + results = set() + import_names = self.node.names + if not isinstance(import_names, cst.ImportStar): + for name in import_names: + real_name = get_full_name_for_node(name.name) + if not real_name: + continue + # real_name can contain `.` for dotted imports + # for these we want to find the longest prefix that matches full_name + parts = real_name.split(".") + real_names = [".".join(parts[:i]) for i in range(len(parts), 0, -1)] + for real_name in real_names: + as_name = real_name + if module and module.endswith("."): + # from . import a + # real_name should be ".a" + real_name = f"{module}{real_name}" + elif module: + real_name = f"{module}.{real_name}" + if name and name.asname: + eval_alias = name.evaluated_alias + if eval_alias is not None: + as_name = eval_alias + if full_name.startswith(as_name): + remaining_name = full_name.split(as_name, 1)[1].lstrip(".") + results.add( + QualifiedName( + f"{real_name}.{remaining_name}" + if remaining_name + else real_name, + QualifiedNameSource.IMPORT, + ) + ) + break + return results + class Assignments: """A container to provide all assignments in a scope.""" @@ -269,23 +360,6 @@ class Accesses: return len(self[node]) > 0 -class QualifiedNameSource(Enum): - IMPORT = auto() - BUILTIN = auto() - LOCAL = auto() - - -@add_slots -@dataclass(frozen=True) -class QualifiedName: - #: Qualified name, e.g. ``a.b.c`` or ``fn..var``. - name: str - - #: Source of the name, either :attr:`QualifiedNameSource.IMPORT`, :attr:`QualifiedNameSource.BUILTIN` - #: or :attr:`QualifiedNameSource.LOCAL`. 
- source: QualifiedNameSource - - class _NameUtil: @staticmethod def get_name_for(node: Union[str, cst.CSTNode]) -> Optional[str]: @@ -302,84 +376,6 @@ class _NameUtil: return _NameUtil.get_name_for(node.name) return None - @staticmethod - def get_module_name_for_import_alike( - assignment_node: Union[cst.Import, cst.ImportFrom] - ) -> str: - module = "" - if isinstance(assignment_node, cst.ImportFrom): - module_attr = assignment_node.module - relative = assignment_node.relative - if module_attr: - module = get_full_name_for_node(module_attr) or "" - if relative: - module = "." * len(relative) + module - return module - - @staticmethod - def find_qualified_name_for_import_alike( - assignment_node: Union[cst.Import, cst.ImportFrom], full_name: str - ) -> Set[QualifiedName]: - module = _NameUtil.get_module_name_for_import_alike(assignment_node) - results = set() - import_names = assignment_node.names - if not isinstance(import_names, cst.ImportStar): - for name in import_names: - real_name = get_full_name_for_node(name.name) - if not real_name: - continue - # real_name can contain `.` for dotted imports - # for these we want to find the longest prefix that matches full_name - parts = real_name.split(".") - real_names = [".".join(parts[:i]) for i in range(len(parts), 0, -1)] - for real_name in real_names: - as_name = real_name - if module and module.endswith("."): - # from . import a - # real_name should be ".a" - real_name = f"{module}{real_name}" - elif module: - real_name = f"{module}.{real_name}" - if name and name.asname: - eval_alias = name.evaluated_alias - if eval_alias is not None: - as_name = eval_alias - if full_name.startswith(as_name): - remaining_name = full_name.split(as_name, 1)[1].lstrip(".") - results.add( - QualifiedName( - f"{real_name}.{remaining_name}" - if remaining_name - else real_name, - QualifiedNameSource.IMPORT, - ) - ) - break - return results - - @staticmethod - def find_qualified_name_for_non_import( - assignment: Assignment, remaining_name: str - ) -> Set[QualifiedName]: - scope = assignment.scope - name_prefixes = [] - while scope: - if isinstance(scope, ClassScope): - name_prefixes.append(scope.name) - elif isinstance(scope, FunctionScope): - name_prefixes.append(f"{scope.name}.") - elif isinstance(scope, ComprehensionScope): - name_prefixes.append("") - elif not isinstance(scope, (GlobalScope, BuiltinScope)): - raise Exception(f"Unexpected Scope: {scope}") - - scope = scope.parent if scope.parent != scope else None - - parts = [*reversed(name_prefixes)] - if remaining_name: - parts.append(remaining_name) - return {QualifiedName(".".join(parts), QualifiedNameSource.LOCAL)} - class Scope(abc.ABC): """ @@ -555,26 +551,14 @@ class Scope(abc.ABC): assignments = self[prefix] break for assignment in assignments: - if isinstance(assignment, Assignment): - assignment_node = assignment.node - if isinstance(assignment_node, (cst.Import, cst.ImportFrom)): - names = _NameUtil.find_qualified_name_for_import_alike( - assignment_node, full_name - ) - else: - names = _NameUtil.find_qualified_name_for_non_import( - assignment, full_name - ) - if not isinstance(node, str) and _is_assignment(node, assignment_node): - return names - else: - results |= names - elif isinstance(assignment, BuiltinAssignment): - results.add( - QualifiedName( - f"builtins.{assignment.name}", QualifiedNameSource.BUILTIN - ) - ) + names = assignment.get_qualified_names_for(full_name) + if ( + isinstance(assignment, Assignment) + and not isinstance(node, str) + and _is_assignment(node, assignment.node) 
+ ): + return names + results |= names return results @property From d0f8fa97e28335bc744b4e77e1f0d56becf7e99b Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Fri, 19 Nov 2021 10:04:02 -0500 Subject: [PATCH 158/632] Remove remnants of circleci (#552) --- .circleci/.pyre_configuration | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 .circleci/.pyre_configuration diff --git a/.circleci/.pyre_configuration b/.circleci/.pyre_configuration deleted file mode 100644 index 7ffbe4d8..00000000 --- a/.circleci/.pyre_configuration +++ /dev/null @@ -1,12 +0,0 @@ -{ - "source_directories": [ - "." - ], - "search_path": [ - "stubs", "/tmp/libcst-env/lib/python3.7/site-packages" - ], - "exclude": [ - ".*/\\.tox/.*" - ], - "strict": true -} From 3895925f15c35571f4bd362d69884c865e57fec9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 22 Nov 2021 14:06:19 +0000 Subject: [PATCH 159/632] bump version to 0.3.22 --- CHANGELOG.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ad6de4a..0d71c0bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,21 @@ +# 0.3.22 - 2021-11-22 + +## Added +- Add --indent-string option to `libcst.tool print` [#525](https://github.com/Instagram/LibCST/pull/525) +- Publish pre-release packages to test.pypi.org [#550](https://github.com/Instagram/LibCST/pull/550) +- Add ImportAssignment class extending Assignment to record assignments for import statements [#554](https://github.com/Instagram/LibCST/pull/554) + +## Fixed +- Various documentation fixes [#527](https://github.com/Instagram/LibCST/pull/527), [#529](https://github.com/Instagram/LibCST/pull/529) +- Do not add imports if we added no type info in ApplyTypeAnnotationVisitor [(commit)](https://github.com/Instagram/LibCST/commit/87625d02b6cb321c9c29ba1c67d81ce954a1a396) +- Support relative imports in ApplyTypeAnnotationVisitor qualifier handling [#538](https://github.com/Instagram/LibCST/pull/538) +- Don't gather metadata if the wrapper already contains it [#545](https://github.com/Instagram/LibCST/pull/545) +- Swallow parsing errors in string annotations [#548](https://github.com/Instagram/LibCST/pull/548) +- Stop parsing string annotations when no longer in a typing call [#546](https://github.com/Instagram/LibCST/pull/546) + +## Updated +- Move find_qualified_names_for in the Assignment class [#557](https://github.com/Instagram/LibCST/pull/557) + # 0.3.21 - 2021-09-21 ## Fixed From 58b447d8f7d8d82cce763bc7ffc503e62eef666f Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 23 Nov 2021 06:08:23 -0500 Subject: [PATCH 160/632] Fixes incorrectly missing annotations (#561) Co-authored-by: Zsolt Dollenstein --- libcst/metadata/scope_provider.py | 2 - libcst/metadata/tests/test_scope_provider.py | 173 +++++++++++-------- 2 files changed, 100 insertions(+), 75 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 9e6556ce..0c7df8d8 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -874,7 +874,6 @@ class ScopeVisitor(cst.CSTVisitor): def visit_Call(self, node: cst.Call) -> Optional[bool]: self.__top_level_attribute_stack.append(None) self.__in_type_hint_stack.append(False) - self.__in_annotation_stack.append(False) qnames = {qn.name for qn in self.scope.get_qualified_names_for(node)} if "typing.NewType" in qnames or "typing.TypeVar" in qnames: node.func.visit(self) @@ -896,7 +895,6 @@ class ScopeVisitor(cst.CSTVisitor): def leave_Call(self, original_node: 
cst.Call) -> None: self.__top_level_attribute_stack.pop() self.__in_type_hint_stack.pop() - self.__in_annotation_stack.pop() def visit_Annotation(self, node: cst.Annotation) -> Optional[bool]: self.__in_annotation_stack.append(True) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 85d9266b..4f84f439 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -6,7 +6,7 @@ import sys from textwrap import dedent -from typing import Mapping, Tuple, cast +from typing import Mapping, Tuple, cast, Sequence from unittest import mock import libcst as cst @@ -1768,78 +1768,87 @@ class ScopeProviderTest(UnitTest): ), ) - def test_cast(self) -> None: - def assert_parsed(code, *calls): - parse = cst.parse_module - with mock.patch("libcst.parse_module") as parse_mock: - parse_mock.side_effect = parse - get_scope_metadata_provider(dedent(code)) - calls = [mock.call(dedent(code))] + list(calls) - self.assertEqual(parse_mock.call_count, len(calls)) - parse_mock.assert_has_calls(calls) - - assert_parsed( - """ - from typing import TypeVar - TypeVar("Name", "int") - """, - mock.call("int"), - ) - - assert_parsed( - """ - from typing import Dict - Dict["str", "int"] - """, - mock.call("str"), - mock.call("int"), - ) - - assert_parsed( - """ - from typing import Dict, cast - cast(Dict[str, str], {})["3rr0r"] - """ - ) - - assert_parsed( - """ - from typing import cast - cast(str, "foo") - """, - ) - - assert_parsed( - """ - from typing import cast - cast("int", "foo") - """, - mock.call("int"), - ) - - assert_parsed( - """ - from typing import TypeVar - TypeVar("Name", func("int")) - """, - ) - - assert_parsed( - """ - from typing import Literal - Literal[\"G\"] - """, - ) - - assert_parsed( - r""" - from typing import TypeVar, Optional - from a import G - TypeVar("G2", bound="Optional[\"G\"]") - """, - mock.call('Optional["G"]'), - mock.call("G"), - ) + @data_provider( + { + "TypeVar": { + "code": """ + from typing import TypeVar + TypeVar("Name", "int") + """, + "calls": [mock.call("int")], + }, + "Dict": { + "code": """ + from typing import Dict + Dict["str", "int"] + """, + "calls": [mock.call("str"), mock.call("int")], + }, + "cast_no_annotation": { + "code": """ + from typing import Dict, cast + cast(Dict[str, str], {})["3rr0r"] + """, + "calls": [], + }, + "cast_second_arg": { + "code": """ + from typing import cast + cast(str, "foo") + """, + "calls": [], + }, + "cast_first_arg": { + "code": """ + from typing import cast + cast("int", "foo") + """, + "calls": [ + mock.call("int"), + ], + }, + "typevar_func": { + "code": """ + from typing import TypeVar + TypeVar("Name", func("int")) + """, + "calls": [], + }, + "literal": { + "code": """ + from typing import Literal + Literal[\"G\"] + """, + "calls": [], + }, + "nested_str": { + "code": r""" + from typing import TypeVar, Optional + from a import G + TypeVar("G2", bound="Optional[\"G\"]") + """, + "calls": [mock.call('Optional["G"]'), mock.call("G")], + }, + "class_self_ref": { + "code": """ + from typing import TypeVar + class HelperClass: + value: TypeVar("THelperClass", bound="HelperClass") + """, + "calls": [mock.call("HelperClass")], + }, + } + ) + def test_parse_string_annotations( + self, *, code: str, calls: Sequence[mock._Call] + ) -> None: + parse = cst.parse_module + with mock.patch("libcst.parse_module") as parse_mock: + parse_mock.side_effect = parse + get_scope_metadata_provider(dedent(code)) + calls = [mock.call(dedent(code))] 
+ list(calls) + self.assertEqual(parse_mock.call_count, len(calls)) + parse_mock.assert_has_calls(calls) def test_builtin_scope(self) -> None: m, scopes = get_scope_metadata_provider( @@ -1907,3 +1916,21 @@ class ScopeProviderTest(UnitTest): global_pow_accesses = list(global_pow_assignment.references) self.assertEqual(len(global_pow_accesses), 2) + + def test_annotation_access_in_typevar_bound(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + from typing import TypeVar + class Test: + var: TypeVar("T", bound="Test") + """ + ) + imp = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.ImportFrom + ) + scope = scopes[imp] + assignment = list(scope["Test"])[0] + self.assertIsInstance(assignment, Assignment) + self.assertEqual(len(assignment.references), 1) + references = list(assignment.references) + self.assertTrue(references[0].is_annotation) From 9d611f9733194d9185272dfcb4414eabdf4f2931 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 23 Nov 2021 12:53:42 +0000 Subject: [PATCH 161/632] bump version to 0.3.23 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0d71c0bb..c68fc7d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +# 0.3.23 - 2021-11-23 + +## Fixed +- Fix missing string annotation references [#561](https://github.com/Instagram/LibCST/pull/561) + # 0.3.22 - 2021-11-22 ## Added From c02de9b718d461c7ff7ecdf0446cf440569eb2f9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 21 Dec 2021 18:14:39 +0000 Subject: [PATCH 162/632] Implement a Python PEG parser in Rust (#566) This massive PR implements an alternative Python parser that will allow LibCST to parse Python 3.10's new grammar features. The parser is implemented in Rust, but it's turned off by default through the `LIBCST_PARSER_TYPE` environment variable. Set it to `native` to enable. The PR also enables new CI steps that test just the Rust parser, as well as steps that produce binary wheels for a variety of CPython versions and platforms. Note: this PR aims to be roughly feature-equivalent to the main branch, so it doesn't include new 3.10 syntax features. That will be addressed as a follow-up PR. The new parser is implemented in the `native/` directory, and is organized into two rust crates: `libcst_derive` contains some macros to facilitate various features of CST nodes, and `libcst` contains the `parser` itself (including the Python grammar), a `tokenizer` implementation by @bgw, and a very basic representation of CST `nodes`. Parsing is done by 1. **tokenizing** the input utf-8 string (bytes are not supported at the Rust layer, they are converted to utf-8 strings by the python wrapper) 2. running the **PEG parser** on the tokenized input, which also captures certain anchor tokens in the resulting syntax tree 3. 
using the anchor tokens to **inflate** the syntax tree into a proper CST Co-authored-by: Benjamin Woodruff --- .cargo/config.toml | 11 + .editorconfig | 6 +- .flake8 | 1 + .../workflows/.pyre_configuration.template | 3 + .github/workflows/build.yml | 95 +- .gitignore | 1 + LICENSE | 6 +- MANIFEST.in | 3 + libcst/_nodes/tests/test_atom.py | 3 + libcst/_nodes/tests/test_dict.py | 3 + libcst/_nodes/tests/test_funcdef.py | 3 + libcst/_nodes/tests/test_import.py | 4 +- libcst/_nodes/tests/test_list.py | 3 + libcst/_nodes/tests/test_matrix_multiply.py | 3 + libcst/_nodes/tests/test_module.py | 8 +- libcst/_nodes/tests/test_set.py | 3 + libcst/_nodes/tests/test_tuple.py | 38 +- libcst/_nodes/tests/test_with.py | 3 + libcst/_nodes/tests/test_yield.py | 3 + libcst/_parser/conversions/expression.py | 6 +- libcst/_parser/conversions/statement.py | 8 +- libcst/_parser/detect_config.py | 30 +- libcst/_parser/entrypoints.py | 41 +- libcst/_parser/parso/python/py_token.py | 48 + libcst/_parser/parso/python/token.py | 74 +- libcst/_parser/parso/python/tokenize.py | 9 +- libcst/_parser/py_whitespace_parser.py | 260 ++ libcst/_parser/tests/test_detect_config.py | 11 +- libcst/_parser/tests/test_footer_behavior.py | 5 +- libcst/_parser/tests/test_parse_errors.py | 4 +- .../_parser/tests/test_whitespace_parser.py | 11 +- libcst/_parser/tests/test_wrapped_tokenize.py | 8 +- libcst/_parser/types/config.py | 40 +- libcst/_parser/types/py_config.py | 54 + libcst/_parser/types/py_token.py | 27 + libcst/_parser/types/py_whitespace_state.py | 36 + libcst/_parser/types/token.py | 25 +- libcst/_parser/types/whitespace_state.py | 35 +- libcst/_parser/whitespace_parser.py | 271 +- libcst/_parser/wrapped_tokenize.py | 29 +- libcst/codegen/tests/test_codegen_clean.py | 6 +- libcst/codemod/tests/test_codemod_cli.py | 3 +- libcst/matchers/tests/test_extract.py | 8 +- libcst/tests/test_e2e.py | 2 + native/Cargo.lock | 884 +++++ native/Cargo.toml | 6 + native/libcst/Cargo.toml | 47 + native/libcst/Grammar | 707 ++++ native/libcst/README.md | 66 + native/libcst/benches/parser_benchmark.rs | 105 + native/libcst/src/bin.rs | 28 + native/libcst/src/lib.rs | 167 + native/libcst/src/nodes/codegen.rs | 65 + native/libcst/src/nodes/expression.rs | 2234 ++++++++++++ native/libcst/src/nodes/inflate_helpers.rs | 34 + native/libcst/src/nodes/macros.rs | 33 + native/libcst/src/nodes/mod.rs | 43 + native/libcst/src/nodes/module.rs | 92 + native/libcst/src/nodes/op.rs | 1420 ++++++++ native/libcst/src/nodes/parser_config.rs | 137 + native/libcst/src/nodes/py_cached.rs | 76 + native/libcst/src/nodes/statement.rs | 1986 +++++++++++ native/libcst/src/nodes/test_utils.rs | 42 + native/libcst/src/nodes/traits.rs | 62 + native/libcst/src/nodes/whitespace.rs | 167 + native/libcst/src/parser/errors.rs | 78 + native/libcst/src/parser/grammar.rs | 2993 +++++++++++++++++ native/libcst/src/parser/mod.rs | 6 + native/libcst/src/parser/numbers.rs | 68 + native/libcst/src/py.rs | 25 + native/libcst/src/tokenizer/core/LICENSE | 46 + native/libcst/src/tokenizer/core/README.md | 2 + native/libcst/src/tokenizer/core/mod.rs | 1144 +++++++ .../libcst/src/tokenizer/core/string_types.rs | 119 + native/libcst/src/tokenizer/debug_utils.rs | 16 + native/libcst/src/tokenizer/mod.rs | 15 + native/libcst/src/tokenizer/operators.rs | 85 + native/libcst/src/tokenizer/tests.rs | 689 ++++ .../src/tokenizer/text_position/char_width.rs | 329 ++ .../libcst/src/tokenizer/text_position/mod.rs | 353 ++ .../libcst/src/tokenizer/whitespace_parser.rs | 401 +++ 
.../libcst/tests/fixtures/class_craziness.py | 28 + native/libcst/tests/fixtures/comments.py | 101 + native/libcst/tests/fixtures/comparisons.py | 21 + .../decorated_function_without_body.py | 3 + .../tests/fixtures/dysfunctional_del.py | 14 + native/libcst/tests/fixtures/expr.py | 375 +++ .../libcst/tests/fixtures/expr_statement.py | 11 + .../tests/fixtures/fun_with_func_defs.py | 146 + .../libcst/tests/fixtures/global_nonlocal.py | 4 + native/libcst/tests/fixtures/import.py | 19 + .../fixtures/indents_but_no_eol_before_eof.py | 4 + .../fixtures/just_a_comment_without_nl.py | 1 + native/libcst/tests/fixtures/raise.py | 4 + .../libcst/tests/fixtures/smol_statements.py | 4 + .../libcst/tests/fixtures/spacious_spaces.py | 1 + .../libcst/tests/fixtures/suicidal_slices.py | 28 + native/libcst/tests/fixtures/super_strings.py | 28 + .../libcst/tests/fixtures/terrible_tries.py | 71 + .../fixtures/trailing_comment_without_nl.py | 4 + .../tests/fixtures/tuple_shenanigans.py | 28 + .../libcst/tests/fixtures/vast_emptiness.py | 0 .../libcst/tests/fixtures/with_wickedness.py | 13 + native/libcst/tests/fixtures/wonky_walrus.py | 13 + native/libcst/tests/parser_roundtrip.rs | 50 + native/libcst_derive/Cargo.toml | 11 + native/libcst_derive/src/codegen.rs | 63 + native/libcst_derive/src/inflate.rs | 63 + native/libcst_derive/src/into_py.rs | 177 + native/libcst_derive/src/lib.rs | 31 + .../libcst_derive/src/parenthesized_node.rs | 93 + native/roundtrip.sh | 5 + pyproject.toml | 7 + requirements-dev.txt | 4 +- setup.py | 9 + stubs/libcst_native/parser_config.pyi | 44 + stubs/libcst_native/token_type.pyi | 25 + stubs/libcst_native/tokenize.pyi | 29 + stubs/libcst_native/whitespace_parser.pyi | 28 + stubs/libcst_native/whitespace_state.pyi | 15 + 120 files changed, 17118 insertions(+), 478 deletions(-) create mode 100644 .cargo/config.toml create mode 100644 libcst/_parser/parso/python/py_token.py create mode 100644 libcst/_parser/py_whitespace_parser.py create mode 100644 libcst/_parser/types/py_config.py create mode 100644 libcst/_parser/types/py_token.py create mode 100644 libcst/_parser/types/py_whitespace_state.py create mode 100644 native/Cargo.lock create mode 100644 native/Cargo.toml create mode 100644 native/libcst/Cargo.toml create mode 100644 native/libcst/Grammar create mode 100644 native/libcst/README.md create mode 100644 native/libcst/benches/parser_benchmark.rs create mode 100644 native/libcst/src/bin.rs create mode 100644 native/libcst/src/lib.rs create mode 100644 native/libcst/src/nodes/codegen.rs create mode 100644 native/libcst/src/nodes/expression.rs create mode 100644 native/libcst/src/nodes/inflate_helpers.rs create mode 100644 native/libcst/src/nodes/macros.rs create mode 100644 native/libcst/src/nodes/mod.rs create mode 100644 native/libcst/src/nodes/module.rs create mode 100644 native/libcst/src/nodes/op.rs create mode 100644 native/libcst/src/nodes/parser_config.rs create mode 100644 native/libcst/src/nodes/py_cached.rs create mode 100644 native/libcst/src/nodes/statement.rs create mode 100644 native/libcst/src/nodes/test_utils.rs create mode 100644 native/libcst/src/nodes/traits.rs create mode 100644 native/libcst/src/nodes/whitespace.rs create mode 100644 native/libcst/src/parser/errors.rs create mode 100644 native/libcst/src/parser/grammar.rs create mode 100644 native/libcst/src/parser/mod.rs create mode 100644 native/libcst/src/parser/numbers.rs create mode 100644 native/libcst/src/py.rs create mode 100644 native/libcst/src/tokenizer/core/LICENSE create mode 100644 
native/libcst/src/tokenizer/core/README.md create mode 100644 native/libcst/src/tokenizer/core/mod.rs create mode 100644 native/libcst/src/tokenizer/core/string_types.rs create mode 100644 native/libcst/src/tokenizer/debug_utils.rs create mode 100644 native/libcst/src/tokenizer/mod.rs create mode 100644 native/libcst/src/tokenizer/operators.rs create mode 100644 native/libcst/src/tokenizer/tests.rs create mode 100644 native/libcst/src/tokenizer/text_position/char_width.rs create mode 100644 native/libcst/src/tokenizer/text_position/mod.rs create mode 100644 native/libcst/src/tokenizer/whitespace_parser.rs create mode 100644 native/libcst/tests/fixtures/class_craziness.py create mode 100644 native/libcst/tests/fixtures/comments.py create mode 100644 native/libcst/tests/fixtures/comparisons.py create mode 100644 native/libcst/tests/fixtures/decorated_function_without_body.py create mode 100644 native/libcst/tests/fixtures/dysfunctional_del.py create mode 100644 native/libcst/tests/fixtures/expr.py create mode 100644 native/libcst/tests/fixtures/expr_statement.py create mode 100644 native/libcst/tests/fixtures/fun_with_func_defs.py create mode 100644 native/libcst/tests/fixtures/global_nonlocal.py create mode 100644 native/libcst/tests/fixtures/import.py create mode 100644 native/libcst/tests/fixtures/indents_but_no_eol_before_eof.py create mode 100644 native/libcst/tests/fixtures/just_a_comment_without_nl.py create mode 100644 native/libcst/tests/fixtures/raise.py create mode 100644 native/libcst/tests/fixtures/smol_statements.py create mode 100644 native/libcst/tests/fixtures/spacious_spaces.py create mode 100644 native/libcst/tests/fixtures/suicidal_slices.py create mode 100644 native/libcst/tests/fixtures/super_strings.py create mode 100644 native/libcst/tests/fixtures/terrible_tries.py create mode 100644 native/libcst/tests/fixtures/trailing_comment_without_nl.py create mode 100644 native/libcst/tests/fixtures/tuple_shenanigans.py create mode 100644 native/libcst/tests/fixtures/vast_emptiness.py create mode 100644 native/libcst/tests/fixtures/with_wickedness.py create mode 100644 native/libcst/tests/fixtures/wonky_walrus.py create mode 100644 native/libcst/tests/parser_roundtrip.rs create mode 100644 native/libcst_derive/Cargo.toml create mode 100644 native/libcst_derive/src/codegen.rs create mode 100644 native/libcst_derive/src/inflate.rs create mode 100644 native/libcst_derive/src/into_py.rs create mode 100644 native/libcst_derive/src/lib.rs create mode 100644 native/libcst_derive/src/parenthesized_node.rs create mode 100755 native/roundtrip.sh create mode 100644 stubs/libcst_native/parser_config.pyi create mode 100644 stubs/libcst_native/token_type.pyi create mode 100644 stubs/libcst_native/tokenize.pyi create mode 100644 stubs/libcst_native/whitespace_parser.pyi create mode 100644 stubs/libcst_native/whitespace_state.pyi diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 00000000..59c989e6 --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,11 @@ +[target.x86_64-apple-darwin] +rustflags = [ + "-C", "link-arg=-undefined", + "-C", "link-arg=dynamic_lookup", +] + +[target.aarch64-apple-darwin] +rustflags = [ + "-C", "link-arg=-undefined", + "-C", "link-arg=dynamic_lookup", +] \ No newline at end of file diff --git a/.editorconfig b/.editorconfig index 0824f669..c4f3c65d 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,6 +1,6 @@ root = true -[*.{py,pyi,toml,md}] +[*.{py,pyi,rs,toml,md}] charset = "utf-8" end_of_line = lf indent_size = 4 @@ -8,3 +8,7 @@ 
indent_style = space insert_final_newline = true trim_trailing_whitespace = true max_line_length = 88 + +[*.rs] +# https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/guide.md +max_line_length = 100 diff --git a/.flake8 b/.flake8 index e681a88c..cf63afc5 100644 --- a/.flake8 +++ b/.flake8 @@ -106,6 +106,7 @@ exclude = .pyre, __pycache__, .tox, + native, max-complexity = 12 diff --git a/.github/workflows/.pyre_configuration.template b/.github/workflows/.pyre_configuration.template index dc25a33f..4b5b2007 100644 --- a/.github/workflows/.pyre_configuration.template +++ b/.github/workflows/.pyre_configuration.template @@ -1,4 +1,7 @@ { + "exclude": [ + ".*\/native\/.*" + ], "source_directories": [ "." ], diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4ecbea3c..ccdb9171 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -35,6 +35,7 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9] + parser: [pure, native] steps: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 @@ -48,8 +49,14 @@ jobs: - name: Validate Dependencies if: steps.cache.outputs.cache-hit != 'true' run: exit 1 + - if: ${{ matrix.parser == 'native' }} + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + - run: >- + echo LIBCST_PARSER_TYPE=${{ matrix.parser }} >> $GITHUB_ENV - name: Run Tests - run: python -m unittest + run: python setup.py test # Run linters lint: @@ -166,10 +173,37 @@ jobs: # Build python package build: + name: Build wheels on ${{ matrix.os }}/${{ matrix.vers }} needs: setup - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - vers: i686 + os: ubuntu-20.04 + # aarch64 seems to be stuck + # - vers: aarch64 + # os: ubuntu-20.04 + - vers: auto64 + os: ubuntu-20.04 + - vers: arm64 + os: macos-10.15 + - vers: auto64 + os: macos-10.15 + - vers: auto64 + os: windows-2019 + env: + SCCACHE_VERSION: 0.2.13 + CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y" + CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" + CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" + CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin"' + CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" + CIBW_ARCHS: ${{ matrix.vers }} + CIBW_BUILD_VERBOSITY: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: 3.9 @@ -185,13 +219,17 @@ jobs: if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - - name: Build a binary wheel and a source tarball + - name: Build wheels + uses: pypa/cibuildwheel@v2.3.1 + - uses: actions/upload-artifact@v2 + with: + path: wheelhouse/*.whl + - name: Build a source tarball run: >- python -m build --sdist - --wheel - --outdir dist/ + --outdir wheelhouse/ - if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} name: Publish distribution 📦 to Test PyPI uses: pypa/gh-action-pypi-publish@release/v1 @@ -199,3 +237,48 @@ jobs: user: __token__ password: ${{ secrets.TEST_PYPI_API_TOKEN }} repository_url: https://test.pypi.org/legacy/ + packages_dir: wheelhouse/ + +# Test rust parts + native: + name: Rust unit tests + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + steps: + - uses: 
actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + toolchain: stable + components: rustfmt, clippy + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + - name: test + uses: actions-rs/cargo@v1 + with: + command: test + args: --manifest-path=native/Cargo.toml + - name: clippy + uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: --manifest-path=native/Cargo.toml --all-features + + rustfmt: + name: Rustfmt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + - run: rustup component add rustfmt + - uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all --manifest-path=native/Cargo.toml -- --check diff --git a/.gitignore b/.gitignore index 4a2bbd69..dbe480d7 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ libcst/_version.py .hypothesis/ .pyre_configuration .python-version +target/ diff --git a/LICENSE b/LICENSE index 0c823502..13df011c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ All contributions towards LibCST are MIT licensed. -Some Python files have been taken from the standard library and are therefore +Some Python files have been derived from the standard library and are therefore PSF licensed. Modifications on these files are dual licensed (both MIT and PSF). These files are: @@ -8,11 +8,13 @@ PSF). These files are: - libcst/_parser/parso/utils.py - libcst/_parser/parso/pgen2/generator.py - libcst/_parser/parso/pgen2/grammar_parser.py -- libcst/_parser/parso/python/token.py +- libcst/_parser/parso/python/py_token.py - libcst/_parser/parso/python/tokenize.py - libcst/_parser/parso/tests/test_fstring.py - libcst/_parser/parso/tests/test_tokenize.py - libcst/_parser/parso/tests/test_utils.py +- libcst_native/src/tokenize/core/mod.rs +- libcst_native/src/tokenize/core/string_types.rs Some Python files have been taken from dataclasses and are therefore Apache licensed. Modifications on these files are licensed under Apache 2.0 license. 
diff --git a/MANIFEST.in b/MANIFEST.in index 4402255d..0f3912c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,4 @@ include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md requirements.txt requirements-dev.txt docs/source/*.rst libcst/py.typed + +include native/Cargo.toml +recursive-include native * diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index da3ea9ef..d6544e2f 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -9,6 +9,7 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -1120,6 +1121,8 @@ class AtomTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_dict.py b/libcst/_nodes/tests/test_dict.py index 425adb79..430be588 100644 --- a/libcst/_nodes/tests/test_dict.py +++ b/libcst/_nodes/tests/test_dict.py @@ -8,6 +8,7 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -187,4 +188,6 @@ class DictTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index a5d0d080..4675f918 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -8,6 +8,7 @@ from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -2041,4 +2042,6 @@ class FunctionDefParserTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_import.py b/libcst/_nodes/tests/test_import.py index f911029c..0da7c38c 100644 --- a/libcst/_nodes/tests/test_import.py +++ b/libcst/_nodes/tests/test_import.py @@ -617,8 +617,10 @@ class ImportFromParseTest(CSTNodeTest): ), cst.ImportAlias(cst.Name("baz"), comma=cst.Comma()), ), + lpar=cst.LeftParen(), + rpar=cst.RightParen(), ), - "code": "from foo import bar, baz,", + "code": "from foo import (bar, baz,)", }, # Star import statement { diff --git a/libcst/_nodes/tests/test_list.py b/libcst/_nodes/tests/test_list.py index a4a08b95..e2f8bd18 100644 --- a/libcst/_nodes/tests/test_list.py +++ b/libcst/_nodes/tests/test_list.py @@ -8,6 +8,7 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression, parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import 
CodeRange from libcst.testing.utils import data_provider @@ -126,4 +127,6 @@ class ListTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_matrix_multiply.py b/libcst/_nodes/tests/test_matrix_multiply.py index 9f50dd28..b91147e6 100644 --- a/libcst/_nodes/tests/test_matrix_multiply.py +++ b/libcst/_nodes/tests/test_matrix_multiply.py @@ -11,6 +11,7 @@ from libcst._nodes.tests.base import ( parse_expression_as, parse_statement_as, ) +from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider @@ -69,4 +70,6 @@ class NamedExprTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_module.py b/libcst/_nodes/tests/test_module.py index 671a23a6..57a8fd43 100644 --- a/libcst/_nodes/tests/test_module.py +++ b/libcst/_nodes/tests/test_module.py @@ -8,6 +8,7 @@ from typing import Tuple, cast import libcst as cst from libcst import parse_module, parse_statement from libcst._nodes.tests.base import CSTNodeTest +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange, MetadataWrapper, PositionProvider from libcst.testing.utils import data_provider @@ -83,6 +84,7 @@ class ModuleTest(CSTNodeTest): "empty_program_with_newline": { "code": "\n", "expected": cst.Module([], has_trailing_newline=True), + "enabled_for_native": False, }, "empty_program_with_comments": { "code": "# some comment\n", @@ -112,7 +114,11 @@ class ModuleTest(CSTNodeTest): }, } ) - def test_parser(self, *, code: str, expected: cst.Module) -> None: + def test_parser( + self, *, code: str, expected: cst.Module, enabled_for_native: bool = True + ) -> None: + if is_native() and not enabled_for_native: + self.skipTest("Disabled for native parser") self.assertEqual(parse_module(code), expected) @data_provider( diff --git a/libcst/_nodes/tests/test_set.py b/libcst/_nodes/tests/test_set.py index 434bf0ab..3c55268f 100644 --- a/libcst/_nodes/tests/test_set.py +++ b/libcst/_nodes/tests/test_set.py @@ -8,6 +8,7 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider @@ -133,4 +134,6 @@ class ListTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_tuple.py b/libcst/_nodes/tests/test_tuple.py index f3a49bed..6f3b9806 100644 --- a/libcst/_nodes/tests/test_tuple.py +++ b/libcst/_nodes/tests/test_tuple.py @@ -8,6 +8,7 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression, parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -90,41 +91,6 @@ class TupleTest(CSTNodeTest): "parser": parse_expression, "expected_position": CodeRange((1, 1), (1, 11)), }, - # custom 
parenthesis on StarredElement - { - "node": cst.Tuple( - [ - cst.StarredElement( - cst.Name("abc"), - lpar=[cst.LeftParen()], - rpar=[cst.RightParen()], - comma=cst.Comma(), - ) - ] - ), - "code": "((*abc),)", - "parser": parse_expression, - "expected_position": CodeRange((1, 1), (1, 8)), - }, - # custom whitespace on StarredElement - { - "node": cst.Tuple( - [ - cst.Element(cst.Name("one"), comma=cst.Comma()), - cst.StarredElement( - cst.Name("two"), - whitespace_before_value=cst.SimpleWhitespace(" "), - lpar=[cst.LeftParen()], - rpar=[cst.RightParen()], - ), - ], - lpar=[], - rpar=[], # rpar can't own the trailing whitespace if it's not there - ), - "code": "one,(* two)", - "parser": parse_expression, - "expected_position": CodeRange((1, 0), (1, 12)), - }, # missing spaces around tuple, okay with parenthesis { "node": cst.For( @@ -279,4 +245,6 @@ class TupleTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index b74487c7..7a53c2be 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -8,6 +8,7 @@ from typing import Any import libcst as cst from libcst import PartialParserConfig, parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -230,4 +231,6 @@ class WithTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_yield.py b/libcst/_nodes/tests/test_yield.py index 83263beb..7194c04c 100644 --- a/libcst/_nodes/tests/test_yield.py +++ b/libcst/_nodes/tests/test_yield.py @@ -8,6 +8,7 @@ from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_statement_as +from libcst._parser.entrypoints import is_native from libcst.helpers import ensure_type from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -240,4 +241,6 @@ class YieldParsingTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: + if is_native() and not kwargs.get("expect_success", True): + self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 59f2defa..7d68a168 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -581,8 +581,7 @@ def convert_atom_expr( return child -@with_production("atom_expr_await", "'await' atom_expr_trailer", version=">=3.7") -@with_production("atom_expr_await", "AWAIT atom_expr_trailer", version="<=3.6") +@with_production("atom_expr_await", "AWAIT atom_expr_trailer") def convert_atom_expr_await( config: ParserConfig, children: typing.Sequence[typing.Any] ) -> typing.Any: @@ -1509,8 +1508,7 @@ def convert_sync_comp_for( ) -@with_production("comp_for", "['async'] sync_comp_for", version=">=3.7") -@with_production("comp_for", "[ASYNC] sync_comp_for", version="==3.6") +@with_production("comp_for", "[ASYNC] 
sync_comp_for", version=">=3.6") @with_production("comp_for", "sync_comp_for", version="<=3.5") def convert_comp_for( config: ParserConfig, children: typing.Sequence[typing.Any] diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index ae0b1d17..f6ac7fb6 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -1062,8 +1062,7 @@ def _extract_async( return (parse_empty_lines(config, whitespace_before), asyncnode, stmt.value) -@with_production("asyncable_funcdef", "['async'] funcdef", version=">=3.7") -@with_production("asyncable_funcdef", "[ASYNC] funcdef", version=">=3.5,<3.7") +@with_production("asyncable_funcdef", "[ASYNC] funcdef", version=">=3.5") @with_production("asyncable_funcdef", "funcdef", version="<3.5") def convert_asyncable_funcdef(config: ParserConfig, children: Sequence[Any]) -> Any: leading_lines, asyncnode, funcdef = _extract_async(config, children) @@ -1310,10 +1309,7 @@ def convert_decorated(config: ParserConfig, children: Sequence[Any]) -> Any: @with_production( - "asyncable_stmt", "['async'] (funcdef | with_stmt | for_stmt)", version=">=3.7" -) -@with_production( - "asyncable_stmt", "[ASYNC] (funcdef | with_stmt | for_stmt)", version=">=3.5,<3.7" + "asyncable_stmt", "[ASYNC] (funcdef | with_stmt | for_stmt)", version=">=3.5" ) @with_production("asyncable_stmt", "funcdef | with_stmt | for_stmt", version="<3.5") def convert_asyncable_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: diff --git a/libcst/_parser/detect_config.py b/libcst/_parser/detect_config.py index ca13e7c2..ae3222c1 100644 --- a/libcst/_parser/detect_config.py +++ b/libcst/_parser/detect_config.py @@ -9,7 +9,7 @@ import re from dataclasses import dataclass from io import BytesIO from tokenize import detect_encoding as py_tokenize_detect_encoding -from typing import FrozenSet, Iterable, Iterator, Pattern, Set, Union +from typing import FrozenSet, Iterable, Iterator, Pattern, Set, Tuple, Union from libcst._nodes.whitespace import NEWLINE_RE from libcst._parser.parso.python.token import PythonTokenTypes, TokenType @@ -114,6 +114,23 @@ def _detect_future_imports(tokens: Iterable[Token]) -> FrozenSet[str]: return frozenset(future_imports) +def convert_to_utf8( + source: Union[str, bytes], *, partial: PartialParserConfig +) -> Tuple[str, str]: + """ + Returns an (original encoding, converted source) tuple. 
+ """ + partial_encoding = partial.encoding + encoding = ( + _detect_encoding(source) + if isinstance(partial_encoding, AutoConfig) + else partial_encoding + ) + + source_str = source if isinstance(source, str) else source.decode(encoding) + return (encoding, source_str) + + def detect_config( source: Union[str, bytes], *, @@ -128,14 +145,7 @@ def detect_config( python_version = partial.parsed_python_version - partial_encoding = partial.encoding - encoding = ( - _detect_encoding(source) - if isinstance(partial_encoding, AutoConfig) - else partial_encoding - ) - - source_str = source if isinstance(source, str) else source.decode(encoding) + encoding, source_str = convert_to_utf8(source, partial=partial) partial_default_newline = partial.default_newline default_newline = ( @@ -162,7 +172,7 @@ def detect_config( lines = split_lines(source_str, keepends=True) - tokens = tokenize_lines(lines, python_version) + tokens = tokenize_lines(source_str, lines, python_version) partial_default_indent = partial.default_indent if isinstance(partial_default_indent, AutoConfig): diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index f9e78ad8..28966cd1 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -9,13 +9,15 @@ parser. A parser entrypoint should take the source code and some configuration information """ +import os +from functools import partial from typing import Union from libcst._nodes.base import CSTNode from libcst._nodes.expression import BaseExpression from libcst._nodes.module import Module from libcst._nodes.statement import BaseCompoundStatement, SimpleStatementLine -from libcst._parser.detect_config import detect_config +from libcst._parser.detect_config import convert_to_utf8, detect_config from libcst._parser.grammar import get_grammar, validate_grammar from libcst._parser.python_parser import PythonCSTParser from libcst._parser.types.config import PartialParserConfig @@ -23,6 +25,11 @@ from libcst._parser.types.config import PartialParserConfig _DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig() +def is_native() -> bool: + typ = os.environ.get("LIBCST_PARSER_TYPE", None) + return typ == "native" + + def _parse( entrypoint: str, source: Union[str, bytes], @@ -30,6 +37,38 @@ def _parse( *, detect_trailing_newline: bool, detect_default_newline: bool, +) -> CSTNode: + if is_native(): + from libcst.native import parse_module, parse_expression, parse_statement + + encoding, source_str = convert_to_utf8(source, partial=config) + + if entrypoint == "file_input": + parse = partial(parse_module, encoding=encoding) + elif entrypoint == "stmt_input": + parse = parse_statement + elif entrypoint == "expression_input": + parse = parse_expression + else: + raise ValueError(f"Unknown parser entry point: {entrypoint}") + + return parse(source_str) + return _pure_python_parse( + entrypoint, + source, + config, + detect_trailing_newline=detect_trailing_newline, + detect_default_newline=detect_default_newline, + ) + + +def _pure_python_parse( + entrypoint: str, + source: Union[str, bytes], + config: PartialParserConfig, + *, + detect_trailing_newline: bool, + detect_default_newline: bool, ) -> CSTNode: detection_result = detect_config( source, diff --git a/libcst/_parser/parso/python/py_token.py b/libcst/_parser/parso/python/py_token.py new file mode 100644 index 00000000..204ce94d --- /dev/null +++ b/libcst/_parser/parso/python/py_token.py @@ -0,0 +1,48 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. 
+# Licensed to PSF under a Contributor Agreement. +# +# Modifications: +# Copyright David Halter and Contributors +# Modifications are dual-licensed: MIT and PSF. +# 99% of the code is different from pgen2, now. +# +# A fork of `parso.python.token`. +# https://github.com/davidhalter/parso/blob/master/parso/python/token.py +# +# The following changes were made: +# - Explicit TokenType references instead of dynamic creation. +# - Use dataclasses instead of raw classes. +# pyre-unsafe + +from dataclasses import dataclass + + +@dataclass(frozen=True) +class TokenType: + name: str + contains_syntax: bool = False + + def __repr__(self) -> str: + return "%s(%s)" % (self.__class__.__name__, self.name) + + +class PythonTokenTypes: + """ + Basically an enum, but Python 2 doesn't have enums in the standard library. + """ + + STRING: TokenType = TokenType("STRING") + NUMBER: TokenType = TokenType("NUMBER") + NAME: TokenType = TokenType("NAME", contains_syntax=True) + ERRORTOKEN: TokenType = TokenType("ERRORTOKEN") + NEWLINE: TokenType = TokenType("NEWLINE") + INDENT: TokenType = TokenType("INDENT") + DEDENT: TokenType = TokenType("DEDENT") + ERROR_DEDENT: TokenType = TokenType("ERROR_DEDENT") + ASYNC: TokenType = TokenType("ASYNC") + AWAIT: TokenType = TokenType("AWAIT") + FSTRING_STRING: TokenType = TokenType("FSTRING_STRING") + FSTRING_START: TokenType = TokenType("FSTRING_START") + FSTRING_END: TokenType = TokenType("FSTRING_END") + OP: TokenType = TokenType("OP", contains_syntax=True) + ENDMARKER: TokenType = TokenType("ENDMARKER") diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index 204ce94d..e4798f10 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -1,48 +1,34 @@ -# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. +# Copyright (c) Facebook, Inc. and its affiliates. # -# Modifications: -# Copyright David Halter and Contributors -# Modifications are dual-licensed: MIT and PSF. -# 99% of the code is different from pgen2, now. -# -# A fork of `parso.python.token`. -# https://github.com/davidhalter/parso/blob/master/parso/python/token.py -# -# The following changes were made: -# - Explicit TokenType references instead of dynamic creation. -# - Use dataclasses instead of raw classes. -# pyre-unsafe +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
-from dataclasses import dataclass +try: + from libcst_native import token_type as native_token_type + + TokenType = native_token_type.TokenType + + class PythonTokenTypes: + STRING: TokenType = native_token_type.STRING + NUMBER: TokenType = native_token_type.NUMBER + NAME: TokenType = native_token_type.NAME + NEWLINE: TokenType = native_token_type.NEWLINE + INDENT: TokenType = native_token_type.INDENT + DEDENT: TokenType = native_token_type.DEDENT + ASYNC: TokenType = native_token_type.ASYNC + AWAIT: TokenType = native_token_type.AWAIT + FSTRING_STRING: TokenType = native_token_type.FSTRING_STRING + FSTRING_START: TokenType = native_token_type.FSTRING_START + FSTRING_END: TokenType = native_token_type.FSTRING_END + OP: TokenType = native_token_type.OP + ENDMARKER: TokenType = native_token_type.ENDMARKER + # unused dummy tokens for backwards compat with the parso tokenizer + ERRORTOKEN: TokenType = native_token_type.ERRORTOKEN + ERROR_DEDENT: TokenType = native_token_type.ERROR_DEDENT -@dataclass(frozen=True) -class TokenType: - name: str - contains_syntax: bool = False - - def __repr__(self) -> str: - return "%s(%s)" % (self.__class__.__name__, self.name) - - -class PythonTokenTypes: - """ - Basically an enum, but Python 2 doesn't have enums in the standard library. - """ - - STRING: TokenType = TokenType("STRING") - NUMBER: TokenType = TokenType("NUMBER") - NAME: TokenType = TokenType("NAME", contains_syntax=True) - ERRORTOKEN: TokenType = TokenType("ERRORTOKEN") - NEWLINE: TokenType = TokenType("NEWLINE") - INDENT: TokenType = TokenType("INDENT") - DEDENT: TokenType = TokenType("DEDENT") - ERROR_DEDENT: TokenType = TokenType("ERROR_DEDENT") - ASYNC: TokenType = TokenType("ASYNC") - AWAIT: TokenType = TokenType("AWAIT") - FSTRING_STRING: TokenType = TokenType("FSTRING_STRING") - FSTRING_START: TokenType = TokenType("FSTRING_START") - FSTRING_END: TokenType = TokenType("FSTRING_END") - OP: TokenType = TokenType("OP", contains_syntax=True) - ENDMARKER: TokenType = TokenType("ENDMARKER") +except ImportError: + from libcst._parser.parso.python.py_token import ( # noqa F401 + PythonTokenTypes, + TokenType, + ) diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index e816cd62..380246f0 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -995,7 +995,14 @@ def _tokenize_lines_py37_or_above( # noqa: C901 indents.append(indent) break if str.isidentifier(token): - yield PythonToken(NAME, token, spos, prefix) + # py37 doesn't need special tokens for async/await, and we could + # emit NAME, but then we'd need different grammar for py36 and py37. + if token == "async": + yield PythonToken(ASYNC, token, spos, prefix) + elif token == "await": + yield PythonToken(AWAIT, token, spos, prefix) + else: + yield PythonToken(NAME, token, spos, prefix) else: for t in _split_illegal_unicode_name(token, spos, prefix): yield t # yield from Python 2 diff --git a/libcst/_parser/py_whitespace_parser.py b/libcst/_parser/py_whitespace_parser.py new file mode 100644 index 00000000..6eabc8ea --- /dev/null +++ b/libcst/_parser/py_whitespace_parser.py @@ -0,0 +1,260 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import List, Optional, Sequence, Tuple, Union + +from libcst._nodes.whitespace import ( + COMMENT_RE, + NEWLINE_RE, + SIMPLE_WHITESPACE_RE, + Comment, + EmptyLine, + Newline, + ParenthesizedWhitespace, + SimpleWhitespace, + TrailingWhitespace, +) +from libcst._parser.types.config import BaseWhitespaceParserConfig +from libcst._parser.types.whitespace_state import WhitespaceState as State + +# BEGIN PARSER ENTRYPOINTS + + +def parse_simple_whitespace( + config: BaseWhitespaceParserConfig, state: State +) -> SimpleWhitespace: + # The match never fails because the pattern can match an empty string + lines = config.lines + # pyre-fixme[16]: Optional type has no attribute `group`. + ws_line = SIMPLE_WHITESPACE_RE.match(lines[state.line - 1], state.column).group(0) + ws_line_list = [ws_line] + while "\\" in ws_line: + # continuation character + state.line += 1 + state.column = 0 + ws_line = SIMPLE_WHITESPACE_RE.match(lines[state.line - 1], state.column).group( + 0 + ) + ws_line_list.append(ws_line) + + # TODO: we could special-case the common case where there's no continuation + # character to avoid list construction and joining. + + # once we've finished collecting continuation characters + state.column += len(ws_line) + return SimpleWhitespace("".join(ws_line_list)) + + +def parse_empty_lines( + config: BaseWhitespaceParserConfig, + state: State, + *, + override_absolute_indent: Optional[str] = None, +) -> Sequence[EmptyLine]: + # If override_absolute_indent is true, then we need to parse all lines up + # to and including the last line that is indented at our level. These all + # belong to the footer and not to the next line's leading_lines. All lines + # that have indent=False and come after the last line where indent=True + # do not belong to this node. + state_for_line = State( + state.line, state.column, state.absolute_indent, state.is_parenthesized + ) + lines: List[Tuple[State, EmptyLine]] = [] + while True: + el = _parse_empty_line( + config, state_for_line, override_absolute_indent=override_absolute_indent + ) + if el is None: + break + + # Store the updated state with the element we parsed. Then make a new state + # clone for the next element. + lines.append((state_for_line, el)) + state_for_line = State( + state_for_line.line, + state_for_line.column, + state.absolute_indent, + state.is_parenthesized, + ) + + if override_absolute_indent is not None: + # We need to find the last element that is indented, and then split the list + # at that point. + for i in range(len(lines) - 1, -1, -1): + if lines[i][1].indent: + lines = lines[: (i + 1)] + break + else: + # We didn't find any lines, throw them all away + lines = [] + + if lines: + # Update the state line and column to match the last line actually parsed. + final_state: State = lines[-1][0] + state.line = final_state.line + state.column = final_state.column + return [r[1] for r in lines] + + +def parse_trailing_whitespace( + config: BaseWhitespaceParserConfig, state: State +) -> TrailingWhitespace: + trailing_whitespace = _parse_trailing_whitespace(config, state) + if trailing_whitespace is None: + raise Exception( + "Internal Error: Failed to parse TrailingWhitespace. This should never " + + "happen because a TrailingWhitespace is never optional in the grammar, " + + "so this error should've been caught by parso first." 
+ ) + return trailing_whitespace + + +def parse_parenthesizable_whitespace( + config: BaseWhitespaceParserConfig, state: State +) -> Union[SimpleWhitespace, ParenthesizedWhitespace]: + if state.is_parenthesized: + # First, try parenthesized (don't need speculation because it either + # parses or doesn't modify state). + parenthesized_whitespace = _parse_parenthesized_whitespace(config, state) + if parenthesized_whitespace is not None: + return parenthesized_whitespace + # Now, just parse and return a simple whitespace + return parse_simple_whitespace(config, state) + + +# END PARSER ENTRYPOINTS +# BEGIN PARSER INTERNAL PRODUCTIONS + + +def _parse_empty_line( + config: BaseWhitespaceParserConfig, + state: State, + *, + override_absolute_indent: Optional[str] = None, +) -> Optional[EmptyLine]: + # begin speculative parsing + speculative_state = State( + state.line, state.column, state.absolute_indent, state.is_parenthesized + ) + try: + indent = _parse_indent( + config, speculative_state, override_absolute_indent=override_absolute_indent + ) + except Exception: + # We aren't on a new line, speculative parsing failed + return None + whitespace = parse_simple_whitespace(config, speculative_state) + comment = _parse_comment(config, speculative_state) + newline = _parse_newline(config, speculative_state) + if newline is None: + # speculative parsing failed + return None + # speculative parsing succeeded + state.line = speculative_state.line + state.column = speculative_state.column + # don't need to copy absolute_indent/is_parenthesized because they don't change. + return EmptyLine(indent, whitespace, comment, newline) + + +def _parse_indent( + config: BaseWhitespaceParserConfig, + state: State, + *, + override_absolute_indent: Optional[str] = None, +) -> bool: + """ + Returns True if indentation was found, otherwise False. + """ + absolute_indent = ( + override_absolute_indent + if override_absolute_indent is not None + else state.absolute_indent + ) + line_str = config.lines[state.line - 1] + if state.column != 0: + if state.column == len(line_str) and state.line == len(config.lines): + # We're at EOF, treat this as a failed speculative parse + return False + raise Exception("Internal Error: Column should be 0 when parsing an indent.") + if line_str.startswith(absolute_indent, state.column): + state.column += len(absolute_indent) + return True + return False + + +def _parse_comment( + config: BaseWhitespaceParserConfig, state: State +) -> Optional[Comment]: + comment_match = COMMENT_RE.match(config.lines[state.line - 1], state.column) + if comment_match is None: + return None + comment = comment_match.group(0) + state.column += len(comment) + return Comment(comment) + + +def _parse_newline( + config: BaseWhitespaceParserConfig, state: State +) -> Optional[Newline]: + # begin speculative parsing + line_str = config.lines[state.line - 1] + newline_match = NEWLINE_RE.match(line_str, state.column) + if newline_match is not None: + # speculative parsing succeeded + newline_str = newline_match.group(0) + state.column += len(newline_str) + if state.column != len(line_str): + raise Exception("Internal Error: Found a newline, but it wasn't the EOL.") + if state.line < len(config.lines): + # this newline was the end of a line, and there's another line, + # therefore we should move to the next line + state.line += 1 + state.column = 0 + if newline_str == config.default_newline: + # Just inherit it from the Module instead of explicitly setting it. 
+ return Newline() + else: + return Newline(newline_str) + else: # no newline was found, speculative parsing failed + return None + + +def _parse_trailing_whitespace( + config: BaseWhitespaceParserConfig, state: State +) -> Optional[TrailingWhitespace]: + # Begin speculative parsing + speculative_state = State( + state.line, state.column, state.absolute_indent, state.is_parenthesized + ) + whitespace = parse_simple_whitespace(config, speculative_state) + comment = _parse_comment(config, speculative_state) + newline = _parse_newline(config, speculative_state) + if newline is None: + # Speculative parsing failed + return None + # Speculative parsing succeeded + state.line = speculative_state.line + state.column = speculative_state.column + # don't need to copy absolute_indent/is_parenthesized because they don't change. + return TrailingWhitespace(whitespace, comment, newline) + + +def _parse_parenthesized_whitespace( + config: BaseWhitespaceParserConfig, state: State +) -> Optional[ParenthesizedWhitespace]: + first_line = _parse_trailing_whitespace(config, state) + if first_line is None: + # Speculative parsing failed + return None + empty_lines = () + while True: + empty_line = _parse_empty_line(config, state) + if empty_line is None: + # This isn't an empty line, so parse it below + break + empty_lines = empty_lines + (empty_line,) + indent = _parse_indent(config, state) + last_line = parse_simple_whitespace(config, state) + return ParenthesizedWhitespace(first_line, empty_lines, indent, last_line) diff --git a/libcst/_parser/tests/test_detect_config.py b/libcst/_parser/tests/test_detect_config.py index b17c9fe5..fdda965b 100644 --- a/libcst/_parser/tests/test_detect_config.py +++ b/libcst/_parser/tests/test_detect_config.py @@ -3,12 +3,15 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-import dataclasses from typing import Union from libcst._parser.detect_config import detect_config from libcst._parser.parso.utils import PythonVersionInfo -from libcst._parser.types.config import ParserConfig, PartialParserConfig +from libcst._parser.types.config import ( + ParserConfig, + PartialParserConfig, + parser_config_asdict, +) from libcst.testing.utils import UnitTest, data_provider @@ -316,7 +319,7 @@ class TestDetectConfig(UnitTest): expected_config: ParserConfig, ) -> None: self.assertEqual( - dataclasses.asdict( + parser_config_asdict( detect_config( source, partial=partial, @@ -324,5 +327,5 @@ class TestDetectConfig(UnitTest): detect_default_newline=detect_default_newline, ).config ), - dataclasses.asdict(expected_config), + parser_config_asdict(expected_config), ) diff --git a/libcst/_parser/tests/test_footer_behavior.py b/libcst/_parser/tests/test_footer_behavior.py index 23ff4e25..f3df77f8 100644 --- a/libcst/_parser/tests/test_footer_behavior.py +++ b/libcst/_parser/tests/test_footer_behavior.py @@ -15,7 +15,10 @@ class FooterBehaviorTest(UnitTest): @data_provider( { # Literally the most basic example - "simple_module": {"code": "\n", "expected_module": cst.Module(body=())}, + "simple_module": { + "code": "", + "expected_module": cst.Module(body=(), has_trailing_newline=False), + }, # A module with a header comment "header_only_module": { "code": "# This is a header comment\n", diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index 6d651f3b..17bf3581 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -8,6 +8,7 @@ from textwrap import dedent from typing import Callable import libcst as cst +from libcst._parser.entrypoints import is_native from libcst.testing.utils import UnitTest, data_provider @@ -169,4 +170,5 @@ class ParseErrorsTest(UnitTest): ) -> None: with self.assertRaises(cst.ParserSyntaxError) as cm: parse_fn() - self.assertEqual(str(cm.exception), expected) + if not is_native(): + self.assertEqual(str(cm.exception), expected) diff --git a/libcst/_parser/tests/test_whitespace_parser.py b/libcst/_parser/tests/test_whitespace_parser.py index 17996b47..22be2412 100644 --- a/libcst/_parser/tests/test_whitespace_parser.py +++ b/libcst/_parser/tests/test_whitespace_parser.py @@ -3,12 +3,11 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-from dataclasses import dataclass -from typing import Callable, Sequence, TypeVar +from typing import Callable, TypeVar import libcst as cst from libcst._nodes.deep_equals import deep_equals -from libcst._parser.types.config import BaseWhitespaceParserConfig +from libcst._parser.types.config import MockWhitespaceParserConfig as Config from libcst._parser.types.whitespace_state import WhitespaceState as State from libcst._parser.whitespace_parser import ( parse_empty_lines, @@ -20,12 +19,6 @@ from libcst.testing.utils import UnitTest, data_provider _T = TypeVar("_T") -@dataclass(frozen=True) -class Config(BaseWhitespaceParserConfig): - lines: Sequence[str] - default_newline: str - - class WhitespaceParserTest(UnitTest): @data_provider( { diff --git a/libcst/_parser/tests/test_wrapped_tokenize.py b/libcst/_parser/tests/test_wrapped_tokenize.py index e131f620..ead00432 100644 --- a/libcst/_parser/tests/test_wrapped_tokenize.py +++ b/libcst/_parser/tests/test_wrapped_tokenize.py @@ -1214,7 +1214,7 @@ class WrappedTokenizeTest(UnitTest): _PY37, ( Token( - type=PythonTokenTypes.NAME, + type=PythonTokenTypes.ASYNC, string="async", start_pos=(1, 0), end_pos=(1, 5), @@ -1364,7 +1364,7 @@ class WrappedTokenizeTest(UnitTest): relative_indent=None, ), Token( - type=PythonTokenTypes.NAME, + type=PythonTokenTypes.AWAIT, string="await", start_pos=(2, 11), end_pos=(2, 16), @@ -1649,7 +1649,7 @@ class WrappedTokenizeTest(UnitTest): _PY38, ( Token( - type=PythonTokenTypes.NAME, + type=PythonTokenTypes.ASYNC, string="async", start_pos=(1, 0), end_pos=(1, 5), @@ -1799,7 +1799,7 @@ class WrappedTokenizeTest(UnitTest): relative_indent=None, ), Token( - type=PythonTokenTypes.NAME, + type=PythonTokenTypes.AWAIT, string="await", start_pos=(2, 11), end_pos=(2, 16), diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index 13778b2a..bf244f1c 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -3,14 +3,12 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. - -import abc import codecs import re import sys from dataclasses import dataclass, field, fields from enum import Enum -from typing import FrozenSet, List, Optional, Pattern, Sequence, Union +from typing import Any, Callable, FrozenSet, List, Mapping, Optional, Pattern, Union from libcst._add_slots import add_slots from libcst._nodes.whitespace import NEWLINE_RE @@ -18,33 +16,21 @@ from libcst._parser.parso.utils import PythonVersionInfo, parse_version_string _INDENT_RE: Pattern[str] = re.compile(r"[ \t]+") +try: + from libcst_native import parser_config as config_mod -class BaseWhitespaceParserConfig(abc.ABC): - """ - Represents the subset of ParserConfig that the whitespace parser requires. This - makes calling the whitespace parser in tests with a mocked configuration easier. - """ + MockWhitespaceParserConfig = config_mod.BaseWhitespaceParserConfig +except ImportError: + from libcst._parser.types import py_config as config_mod - lines: Sequence[str] - default_newline: str + # pyre-fixme[9]: This is a small implementation difference between native and python + MockWhitespaceParserConfig = config_mod.MockWhitespaceParserConfig - -@add_slots # We'll access these properties frequently, so use slots -@dataclass(frozen=True) -class ParserConfig(BaseWhitespaceParserConfig): - """ - An internal configuration object that the python parser passes around. 
These values - are global to the parsed code and should not change during the lifetime of the - parser object. - """ - - lines: Sequence[str] - encoding: str - default_indent: str - default_newline: str - has_trailing_newline: bool - version: PythonVersionInfo - future_imports: FrozenSet[str] +BaseWhitespaceParserConfig = config_mod.BaseWhitespaceParserConfig +ParserConfig = config_mod.ParserConfig +parser_config_asdict: Callable[ + [ParserConfig], Mapping[str, Any] +] = config_mod.parser_config_asdict class AutoConfig(Enum): diff --git a/libcst/_parser/types/py_config.py b/libcst/_parser/types/py_config.py new file mode 100644 index 00000000..6722a9ea --- /dev/null +++ b/libcst/_parser/types/py_config.py @@ -0,0 +1,54 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +from dataclasses import asdict, dataclass +from typing import Any, FrozenSet, Mapping, Sequence + +from libcst._parser.parso.utils import PythonVersionInfo + + +class BaseWhitespaceParserConfig(abc.ABC): + """ + Represents the subset of ParserConfig that the whitespace parser requires. This + makes calling the whitespace parser in tests with a mocked configuration easier. + """ + + lines: Sequence[str] + default_newline: str + + +@dataclass(frozen=True) +class MockWhitespaceParserConfig(BaseWhitespaceParserConfig): + """ + An internal type used by unit tests. + """ + + lines: Sequence[str] + default_newline: str + + +@dataclass(frozen=True) +class ParserConfig(BaseWhitespaceParserConfig): + """ + An internal configuration object that the python parser passes around. These + values are global to the parsed code and should not change during the lifetime + of the parser object. + """ + + lines: Sequence[str] + encoding: str + default_indent: str + default_newline: str + has_trailing_newline: bool + version: PythonVersionInfo + future_imports: FrozenSet[str] + + +def parser_config_asdict(config: ParserConfig) -> Mapping[str, Any]: + """ + An internal helper function used by unit tests to compare configs. + """ + return asdict(config) diff --git a/libcst/_parser/types/py_token.py b/libcst/_parser/types/py_token.py new file mode 100644 index 00000000..60ddb2a2 --- /dev/null +++ b/libcst/_parser/types/py_token.py @@ -0,0 +1,27 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + + +from dataclasses import dataclass +from typing import Optional, Tuple + +from libcst._add_slots import add_slots +from libcst._parser.parso.python.token import TokenType +from libcst._parser.types.whitespace_state import WhitespaceState + + +@add_slots +@dataclass(frozen=True) +class Token: + type: TokenType + string: str + # The start of where `string` is in the source, not including leading whitespace. + start_pos: Tuple[int, int] + # The end of where `string` is in the source, not including trailing whitespace. + end_pos: Tuple[int, int] + whitespace_before: WhitespaceState + whitespace_after: WhitespaceState + # The relative indent this token adds. + relative_indent: Optional[str] diff --git a/libcst/_parser/types/py_whitespace_state.py b/libcst/_parser/types/py_whitespace_state.py new file mode 100644 index 00000000..41244b98 --- /dev/null +++ b/libcst/_parser/types/py_whitespace_state.py @@ -0,0 +1,36 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from dataclasses import dataclass + +from libcst._add_slots import add_slots + + +@add_slots +@dataclass(frozen=False) +class WhitespaceState: + """ + A frequently mutated store of the whitespace parser's current state. This object + must be cloned prior to speculative parsing. + + This is in contrast to the `config` object each whitespace parser function takes, + which is frozen and never mutated. + + Whitespace parsing works by mutating this state object. By encapsulating saving, and + re-using state objects inside the top-level python parser, the whitespace parser is + able to be reentrant. One 'convert' function can consume part of the whitespace, and + another 'convert' function can consume the rest, depending on who owns what + whitespace. + + This is similar to the approach you might take to parse nested languages (e.g. + JavaScript inside of HTML). We're treating whitespace as a separate language and + grammar from the rest of Python's grammar. + """ + + line: int # one-indexed (to match parso's behavior) + column: int # zero-indexed (to match parso's behavior) + # What to look for when executing `_parse_indent`. + absolute_indent: str + is_parenthesized: bool diff --git a/libcst/_parser/types/token.py b/libcst/_parser/types/token.py index 60ddb2a2..88d50b25 100644 --- a/libcst/_parser/types/token.py +++ b/libcst/_parser/types/token.py @@ -4,24 +4,9 @@ # LICENSE file in the root directory of this source tree. -from dataclasses import dataclass -from typing import Optional, Tuple +try: + from libcst_native import tokenize -from libcst._add_slots import add_slots -from libcst._parser.parso.python.token import TokenType -from libcst._parser.types.whitespace_state import WhitespaceState - - -@add_slots -@dataclass(frozen=True) -class Token: - type: TokenType - string: str - # The start of where `string` is in the source, not including leading whitespace. - start_pos: Tuple[int, int] - # The end of where `string` is in the source, not including trailing whitespace. - end_pos: Tuple[int, int] - whitespace_before: WhitespaceState - whitespace_after: WhitespaceState - # The relative indent this token adds. - relative_indent: Optional[str] + Token = tokenize.Token +except ImportError: + from libcst._parser.types.py_token import Token # noqa F401 diff --git a/libcst/_parser/types/whitespace_state.py b/libcst/_parser/types/whitespace_state.py index b5554a2b..a9798054 100644 --- a/libcst/_parser/types/whitespace_state.py +++ b/libcst/_parser/types/whitespace_state.py @@ -7,34 +7,9 @@ Defines the state object used by the whitespace parser. """ -from dataclasses import dataclass +try: + from libcst_native import whitespace_state as mod +except ImportError: + from libcst._parser.types import py_whitespace_state as mod -from libcst._add_slots import add_slots - - -@add_slots -@dataclass(frozen=False) -class WhitespaceState: - """ - A frequently mutated store of the whitespace parser's current state. This object - must be cloned prior to speculative parsing. - - This is in contrast to the `config` object each whitespace parser function takes, - which is frozen and never mutated. - - Whitespace parsing works by mutating this state object. By encapsulating saving, and - re-using state objects inside the top-level python parser, the whitespace parser is - able to be reentrant. 
One 'convert' function can consume part of the whitespace, and - another 'convert' function can consume the rest, depending on who owns what - whitespace. - - This is similar to the approach you might take to parse nested languages (e.g. - JavaScript inside of HTML). We're treating whitespace as a separate language and - grammar from the rest of Python's grammar. - """ - - line: int # one-indexed (to match parso's behavior) - column: int # zero-indexed (to match parso's behavior) - # What to look for when executing `_parse_indent`. - absolute_indent: str - is_parenthesized: bool +WhitespaceState = mod.WhitespaceState diff --git a/libcst/_parser/whitespace_parser.py b/libcst/_parser/whitespace_parser.py index 27892a27..9ffb6a7d 100644 --- a/libcst/_parser/whitespace_parser.py +++ b/libcst/_parser/whitespace_parser.py @@ -5,7 +5,7 @@ """ Parso doesn't attempt to parse (or even emit tokens for) whitespace or comments that -isn't syntatically important. Instead, we're just given the whitespace as a "prefix" of +aren't syntatically important. Instead, we're just given the whitespace as a "prefix" of the token. However, in our CST, whitespace is gathered into far more detailed objects than a simple @@ -15,258 +15,19 @@ Fortunately this isn't hard for us to parse ourselves, so we just use our own hand-rolled recursive descent parser. """ -from typing import List, Optional, Sequence, Tuple, Union +try: + # It'd be better to do `from libcst_native.whitespace_parser import *`, but we're + # blocked on https://github.com/PyO3/pyo3/issues/759 + # (which ultimately seems to be a limitation of how importlib works) + from libcst_native import whitespace_parser as mod +except ImportError: + from libcst._parser import py_whitespace_parser as mod -from libcst._nodes.whitespace import ( - COMMENT_RE, - NEWLINE_RE, - SIMPLE_WHITESPACE_RE, - Comment, - EmptyLine, - Newline, - ParenthesizedWhitespace, - SimpleWhitespace, - TrailingWhitespace, -) -from libcst._parser.types.config import BaseWhitespaceParserConfig -from libcst._parser.types.whitespace_state import WhitespaceState as State - -# BEGIN PARSER ENTRYPOINTS - - -def parse_simple_whitespace( - config: BaseWhitespaceParserConfig, state: State -) -> SimpleWhitespace: - # The match never fails because the pattern can match an empty string - lines = config.lines - # pyre-fixme[16]: Optional type has no attribute `group`. - ws_line = SIMPLE_WHITESPACE_RE.match(lines[state.line - 1], state.column).group(0) - ws_line_list = [ws_line] - while "\\" in ws_line: - # continuation character - state.line += 1 - state.column = 0 - ws_line = SIMPLE_WHITESPACE_RE.match(lines[state.line - 1], state.column).group( - 0 - ) - ws_line_list.append(ws_line) - - # TODO: we could special-case the common case where there's no continuation - # character to avoid list construction and joining. - - # once we've finished collecting continuation characters - state.column += len(ws_line) - return SimpleWhitespace("".join(ws_line_list)) - - -def parse_empty_lines( - config: BaseWhitespaceParserConfig, - state: State, - *, - override_absolute_indent: Optional[str] = None, -) -> Sequence[EmptyLine]: - # If override_absolute_indent is true, then we need to parse all lines up - # to and including the last line that is indented at our level. These all - # belong to the footer and not to the next line's leading_lines. All lines - # that have indent=False and come after the last line where indent=True - # do not belong to this node. 
- state_for_line = State( - state.line, state.column, state.absolute_indent, state.is_parenthesized - ) - lines: List[Tuple[State, EmptyLine]] = [] - while True: - el = _parse_empty_line( - config, state_for_line, override_absolute_indent=override_absolute_indent - ) - if el is None: - break - - # Store the updated state with the element we parsed. Then make a new state - # clone for the next element. - lines.append((state_for_line, el)) - state_for_line = State( - state_for_line.line, - state_for_line.column, - state.absolute_indent, - state.is_parenthesized, - ) - - if override_absolute_indent is not None: - # We need to find the last element that is indented, and then split the list - # at that point. - for i in range(len(lines) - 1, -1, -1): - if lines[i][1].indent: - lines = lines[: (i + 1)] - break - else: - # We didn't find any lines, throw them all away - lines = [] - - if lines: - # Update the state line and column to match the last line actually parsed. - final_state: State = lines[-1][0] - state.line = final_state.line - state.column = final_state.column - return [r[1] for r in lines] - - -def parse_trailing_whitespace( - config: BaseWhitespaceParserConfig, state: State -) -> TrailingWhitespace: - trailing_whitespace = _parse_trailing_whitespace(config, state) - if trailing_whitespace is None: - raise Exception( - "Internal Error: Failed to parse TrailingWhitespace. This should never " - + "happen because a TrailingWhitespace is never optional in the grammar, " - + "so this error should've been caught by parso first." - ) - return trailing_whitespace - - -def parse_parenthesizable_whitespace( - config: BaseWhitespaceParserConfig, state: State -) -> Union[SimpleWhitespace, ParenthesizedWhitespace]: - if state.is_parenthesized: - # First, try parenthesized (don't need speculation because it either - # parses or doesn't modify state). - parenthesized_whitespace = _parse_parenthesized_whitespace(config, state) - if parenthesized_whitespace is not None: - return parenthesized_whitespace - # Now, just parse and return a simple whitespace - return parse_simple_whitespace(config, state) - - -# END PARSER ENTRYPOINTS -# BEGIN PARSER INTERNAL PRODUCTIONS - - -def _parse_empty_line( - config: BaseWhitespaceParserConfig, - state: State, - *, - override_absolute_indent: Optional[str] = None, -) -> Optional[EmptyLine]: - # begin speculative parsing - speculative_state = State( - state.line, state.column, state.absolute_indent, state.is_parenthesized - ) - try: - indent = _parse_indent( - config, speculative_state, override_absolute_indent=override_absolute_indent - ) - except Exception: - # We aren't on a new line, speculative parsing failed - return None - whitespace = parse_simple_whitespace(config, speculative_state) - comment = _parse_comment(config, speculative_state) - newline = _parse_newline(config, speculative_state) - if newline is None: - # speculative parsing failed - return None - # speculative parsing succeeded - state.line = speculative_state.line - state.column = speculative_state.column - # don't need to copy absolute_indent/is_parenthesized because they don't change. - return EmptyLine(indent, whitespace, comment, newline) - - -def _parse_indent( - config: BaseWhitespaceParserConfig, - state: State, - *, - override_absolute_indent: Optional[str] = None, -) -> bool: - """ - Returns True if indentation was found, otherwise False. 
- """ - absolute_indent = ( - override_absolute_indent - if override_absolute_indent is not None - else state.absolute_indent - ) - line_str = config.lines[state.line - 1] - if state.column != 0: - if state.column == len(line_str) and state.line == len(config.lines): - # We're at EOF, treat this as a failed speculative parse - return False - raise Exception("Internal Error: Column should be 0 when parsing an indent.") - if line_str.startswith(absolute_indent, state.column): - state.column += len(absolute_indent) - return True - return False - - -def _parse_comment( - config: BaseWhitespaceParserConfig, state: State -) -> Optional[Comment]: - comment_match = COMMENT_RE.match(config.lines[state.line - 1], state.column) - if comment_match is None: - return None - comment = comment_match.group(0) - state.column += len(comment) - return Comment(comment) - - -def _parse_newline( - config: BaseWhitespaceParserConfig, state: State -) -> Optional[Newline]: - # begin speculative parsing - line_str = config.lines[state.line - 1] - newline_match = NEWLINE_RE.match(line_str, state.column) - if newline_match is not None: - # speculative parsing succeeded - newline_str = newline_match.group(0) - state.column += len(newline_str) - if state.column != len(line_str): - raise Exception("Internal Error: Found a newline, but it wasn't the EOL.") - if state.line < len(config.lines): - # this newline was the end of a line, and there's another line, - # therefore we should move to the next line - state.line += 1 - state.column = 0 - if newline_str == config.default_newline: - # Just inherit it from the Module instead of explicitly setting it. - return Newline() - else: - return Newline(newline_str) - else: # no newline was found, speculative parsing failed - return None - - -def _parse_trailing_whitespace( - config: BaseWhitespaceParserConfig, state: State -) -> Optional[TrailingWhitespace]: - # Begin speculative parsing - speculative_state = State( - state.line, state.column, state.absolute_indent, state.is_parenthesized - ) - whitespace = parse_simple_whitespace(config, speculative_state) - comment = _parse_comment(config, speculative_state) - newline = _parse_newline(config, speculative_state) - if newline is None: - # Speculative parsing failed - return None - # Speculative parsing succeeded - state.line = speculative_state.line - state.column = speculative_state.column - # don't need to copy absolute_indent/is_parenthesized because they don't change. 
- return TrailingWhitespace(whitespace, comment, newline) - - -def _parse_parenthesized_whitespace( - config: BaseWhitespaceParserConfig, state: State -) -> Optional[ParenthesizedWhitespace]: - first_line = _parse_trailing_whitespace(config, state) - if first_line is None: - # Speculative parsing failed - return None - empty_lines = () - while True: - empty_line = _parse_empty_line(config, state) - if empty_line is None: - # This isn't an empty line, so parse it below - break - empty_lines = empty_lines + (empty_line,) - indent = _parse_indent(config, state) - last_line = parse_simple_whitespace(config, state) - return ParenthesizedWhitespace(first_line, empty_lines, indent, last_line) +# pyre-fixme[5]: There's no sane way to type these re-exports +parse_simple_whitespace = mod.parse_simple_whitespace +# pyre-fixme[5]: There's no sane way to type these re-exports +parse_empty_lines = mod.parse_empty_lines +# pyre-fixme[5]: There's no sane way to type these re-exports +parse_trailing_whitespace = mod.parse_trailing_whitespace +# pyre-fixme[5]: There's no sane way to type these re-exports +parse_parenthesizable_whitespace = mod.parse_parenthesizable_whitespace diff --git a/libcst/_parser/wrapped_tokenize.py b/libcst/_parser/wrapped_tokenize.py index d77ed68c..ae86c910 100644 --- a/libcst/_parser/wrapped_tokenize.py +++ b/libcst/_parser/wrapped_tokenize.py @@ -22,7 +22,7 @@ tokenize module, instead of as a wrapper. from dataclasses import dataclass, field from enum import Enum -from typing import Generator, List, Optional, Sequence +from typing import Generator, Iterator, List, Optional, Sequence from libcst._add_slots import add_slots from libcst._exceptions import ParserSyntaxError @@ -76,15 +76,30 @@ class _TokenizeState: ) -def tokenize( - code: str, version_info: PythonVersionInfo -) -> Generator[Token, None, None]: - lines = split_lines(code, keepends=True) - return tokenize_lines(lines, version_info) +def tokenize(code: str, version_info: PythonVersionInfo) -> Iterator[Token]: + try: + from libcst_native import tokenize as native_tokenize + + return native_tokenize.tokenize(code) + except ImportError: + lines = split_lines(code, keepends=True) + return tokenize_lines(code, lines, version_info) def tokenize_lines( - lines: Sequence[str], version_info: PythonVersionInfo + code: str, lines: Sequence[str], version_info: PythonVersionInfo +) -> Iterator[Token]: + try: + from libcst_native import tokenize as native_tokenize + + # TODO: pass through version_info + return native_tokenize.tokenize(code) + except ImportError: + return tokenize_lines_py(code, lines, version_info) + + +def tokenize_lines_py( + code: str, lines: Sequence[str], version_info: PythonVersionInfo ) -> Generator[Token, None, None]: state = _TokenizeState(lines) orig_tokens_iter = iter(orig_tokenize_lines(lines, version_info)) diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index dad5166e..c2f18df2 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -22,7 +22,7 @@ class TestCodegenClean(UnitTest): """ new_code = clean_generated_code("\n".join(visitor_codegen.generated_code)) new_file = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "visitor_codegen.py.deleteme" + os.path.dirname(os.path.abspath(__file__)), "visitor_codegen.deleteme.py" ) with open(new_file, "w") as fp: fp.write(new_code) @@ -56,7 +56,7 @@ class TestCodegenClean(UnitTest): """ new_code = 
clean_generated_code("\n".join(matcher_codegen.generated_code)) new_file = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "matcher_codegen.py.deleteme" + os.path.dirname(os.path.abspath(__file__)), "matcher_codegen.deleteme.py" ) with open(new_file, "w") as fp: fp.write(new_code) @@ -90,7 +90,7 @@ class TestCodegenClean(UnitTest): """ new_code = clean_generated_code("\n".join(type_codegen.generated_code)) new_file = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "type_codegen.py.deleteme" + os.path.dirname(os.path.abspath(__file__)), "type_codegen.deleteme.py" ) with open(new_file, "w") as fp: fp.write(new_code) diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 7daa7056..f586ea19 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -9,6 +9,7 @@ import subprocess import sys from pathlib import Path +from libcst._parser.entrypoints import is_native from libcst.testing.utils import UnitTest @@ -27,7 +28,7 @@ class TestCodemodCLI(UnitTest): stderr=subprocess.PIPE, ) version = sys.version_info - if version[0] == 3 and version[1] == 6: + if version[0] == 3 and version[1] == 6 and not is_native(): self.assertIn( "ParserSyntaxError: Syntax Error @ 14:11.", rlt.stderr.decode("utf-8"), diff --git a/libcst/matchers/tests/test_extract.py b/libcst/matchers/tests/test_extract.py index 77c134a8..298f3fec 100644 --- a/libcst/matchers/tests/test_extract.py +++ b/libcst/matchers/tests/test_extract.py @@ -408,9 +408,11 @@ class MatchersExtractTest(UnitTest): ] ), ) - extracted_seq = cst.ensure_type( - cst.ensure_type(expression, cst.Tuple).elements[1].value, cst.Call - ).args + extracted_seq = tuple( + cst.ensure_type( + cst.ensure_type(expression, cst.Tuple).elements[1].value, cst.Call + ).args + ) self.assertEqual(nodes, {"args": extracted_seq}) # Verify false behavior diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py index 1e68ed6c..1b836eab 100644 --- a/libcst/tests/test_e2e.py +++ b/libcst/tests/test_e2e.py @@ -59,6 +59,8 @@ class ToolE2ETest(TestCase): hide_progress=True, ) + print(result) + # Check results self.assertEqual(2, result.successes) self.assertEqual(0, result.skips) diff --git a/native/Cargo.lock b/native/Cargo.lock new file mode 100644 index 00000000..92b17afe --- /dev/null +++ b/native/Cargo.lock @@ -0,0 +1,884 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "aho-corasick" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +dependencies = [ + "memchr", +] + +[[package]] +name = "annotate-snippets" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "bstr" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90682c8d613ad3373e66de8c6411e0ae2ab2571e879d2efbf73558cc66f21279" +dependencies = [ + "lazy_static", + "memchr", + "regex-automata", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631" + +[[package]] +name = "cast" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chic" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5b5db619f3556839cb2223ae86ff3f9a09da2c5013be42bc9af08c9589bf70c" +dependencies = [ + "annotate-snippets", +] + +[[package]] +name = "clap" +version = "2.33.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" +dependencies = [ + "bitflags", + "textwrap", + "unicode-width", +] + +[[package]] +name = "criterion" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab327ed7354547cc2ef43cbe20ef68b988e70b4b593cbd66a2a61733123a3d23" +dependencies = [ + "atty", + "cast", + "clap", + "criterion-plot", + "csv", + "itertools 0.10.1", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_cbor", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-cycles-per-byte" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d34485a578330c7a91ccf064674f3739a7aebbf3b9d7fd498a6d3e8f7473c96" +dependencies = [ + "criterion", +] + +[[package]] +name = "criterion-plot" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e022feadec601fba1649cfa83586381a4ad31c6bf3a9ab7d408118b05dd9889d" +dependencies = [ + "cast", + "itertools 0.9.0", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "lazy_static", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +dependencies = [ + "cfg-if", + "lazy_static", +] + +[[package]] +name = "csv" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" +dependencies = [ + "bstr", + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", +] + +[[package]] +name = "difference" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "half" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "indoc" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", + "unindent", +] + +[[package]] +name = "instant" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "itertools" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" + +[[package]] +name = "js-sys" +version = "0.3.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83bdfbace3a0e81a4253f73b49e960b053e396a11012cbd49b9b74d6a2b67062" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790" + +[[package]] +name = "libcst" +version = "0.1.0" +dependencies = [ + "chic", + "criterion", + "criterion-cycles-per-byte", + "difference", + "itertools 0.10.1", + "libcst_derive", + "once_cell", + "paste 1.0.5", + "peg", + "pyo3", + "regex", + "thiserror", +] + +[[package]] +name = "libcst_derive" +version = "0.1.0" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "lock_api" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0382880606dff6d15c9476c416d18690b72742aa7b605bb6dd6ec9030fbf07eb" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "memchr" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc" + +[[package]] +name = "memoffset" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "parking_lot" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +dependencies = [ + 
"cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "paste" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" + +[[package]] +name = "paste-impl" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" +dependencies = [ + "proc-macro-hack", +] + +[[package]] +name = "peg" +version = "0.7.0" +source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +dependencies = [ + "peg-macros", + "peg-runtime", +] + +[[package]] +name = "peg-macros" +version = "0.7.0" +source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +dependencies = [ + "peg-runtime", + "proc-macro2", + "quote", +] + +[[package]] +name = "peg-runtime" +version = "0.7.0" +source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" + +[[package]] +name = "plotters" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c" + +[[package]] +name = "plotters-svg" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" + +[[package]] +name = "proc-macro2" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "pyo3" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35100f9347670a566a67aa623369293703322bb9db77d99d7df7313b575ae0c8" +dependencies = [ + "cfg-if", + "indoc", + "libc", + "parking_lot", + "paste 0.1.18", + "pyo3-build-config", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-build-config" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d12961738cacbd7f91b7c43bc25cfeeaa2698ad07a04b3be0aa88b950865738f" +dependencies = [ + "once_cell", +] + +[[package]] +name = "pyo3-macros" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0bc5215d704824dfddddc03f93cb572e1155c68b6761c37005e1c288808ea8" +dependencies = [ + "pyo3-macros-backend", + "quote", + "syn", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"71623fc593224afaab918aa3afcaf86ed2f43d34f6afde7f3922608f253240df" +dependencies = [ + "proc-macro2", + "pyo3-build-config", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rayon" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" +dependencies = [ + "autocfg", + "crossbeam-deque", + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" +dependencies = [ + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-utils", + "lazy_static", + "num_cpus", +] + +[[package]] +name = "redox_syscall" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" + +[[package]] +name = "regex-syntax" +version = "0.6.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "semver" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f3aac57ee7f3272d8395c6e4f502f434f0e289fcd62876f70daa008c20dcabe" + +[[package]] +name = "serde" +version = "1.0.126" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03" + +[[package]] +name = "serde_cbor" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e18acfa2f90e8b735b2836ab8d538de304cbb6729a7360729ea5a895d15a622" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.126" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"963a7dbc9895aeac7ac90e74f34a5d5261828f79df35cbed41e10189d3804d43" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "smallvec" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" + +[[package]] +name = "syn" +version = "1.0.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93119e4feac1cbe6c798c34d3a53ea0026b0b1de6a120deef895137c0529bfe2" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "unicode-width" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "unindent" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" + +[[package]] +name = "walkdir" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +dependencies = [ + "same-file", + "winapi", + "winapi-util", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54ee1d4ed486f78874278e63e4069fc1ab9f6a18ca492076ffb90c5eb2997fd" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b33f6a0694ccfea53d94db8b2ed1c3a8a4c86dd936b13b9f0a15ec4a451b900" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "088169ca61430fe1e58b8096c24975251700e7b1f6fd91cc9d59b04fb9b18bd4" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", 
+] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be2241542ff3d9f241f5e2cb6dd09b37efe786df8851c54957683a49f0987a97" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7cff876b8f18eed75a66cf49b65e7f967cb354a7aa16003fb55dbfd25b44b4f" + +[[package]] +name = "web-sys" +version = "0.3.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e828417b379f3df7111d3a2a9e5753706cae29c41f7c4029ee9fd77f3e09e582" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/native/Cargo.toml b/native/Cargo.toml new file mode 100644 index 00000000..3a0b79f7 --- /dev/null +++ b/native/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] + +members = [ + "libcst", + "libcst_derive", +] diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml new file mode 100644 index 00000000..f009e918 --- /dev/null +++ b/native/libcst/Cargo.toml @@ -0,0 +1,47 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +[package] +name = "libcst" +version = "0.1.0" +authors = ["LibCST Developers"] +edition = "2018" + +[lib] +name = "libcst_native" +crate-type = ["cdylib", "rlib"] + +[[bin]] +name = "parse" +path = "src/bin.rs" + +[features] +# This is a bit of a hack, since `cargo test` doesn't work with `extension-module`. +# To run tests, use `cargo test --no-default-features`. +# +# Once https://github.com/PyO3/pyo3/pull/1123 lands, it may be better to use +# `-Zextra-link-arg` for this instead. 
+default = ["pyo3/extension-module"] +trace = ["peg/trace"] + +[dependencies] +paste = "1.0.4" +pyo3 = "0.14.4" +thiserror = "1.0.23" +peg = { git = "https://github.com/kevinmehall/rust-peg" } +chic = "1.2.2" +itertools = "0.10.0" +once_cell = "1.5.2" +regex = "1.5.4" +libcst_derive = { path = "../libcst_derive" } + +[dev-dependencies] +criterion = { version = "0.3.4", features = ["html_reports"] } +criterion-cycles-per-byte = "0.1" +difference = "2.0.0" + +[[bench]] +name = "parser_benchmark" +harness = false diff --git a/native/libcst/Grammar b/native/libcst/Grammar new file mode 100644 index 00000000..274db713 --- /dev/null +++ b/native/libcst/Grammar @@ -0,0 +1,707 @@ +# PEG grammar for Python 3.9 + +@trailer ''' +void * +_PyPegen_parse(Parser *p) +{ + // Initialize keywords + p->keywords = reserved_keywords; + p->n_keyword_lists = n_keyword_lists; + + // Run parser + void *result = NULL; + if (p->start_rule == Py_file_input) { + result = file_rule(p); + } else if (p->start_rule == Py_single_input) { + result = interactive_rule(p); + } else if (p->start_rule == Py_eval_input) { + result = eval_rule(p); + } else if (p->start_rule == Py_func_type_input) { + result = func_type_rule(p); + } else if (p->start_rule == Py_fstring_input) { + result = fstring_rule(p); + } + + return result; +} + +// The end +''' +file[mod_ty]: a=[statements] ENDMARKER { _PyPegen_make_module(p, a) } +interactive[mod_ty]: a=statement_newline { Interactive(a, p->arena) } +eval[mod_ty]: a=expressions NEWLINE* ENDMARKER { Expression(a, p->arena) } +func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* ENDMARKER { FunctionType(a, b, p->arena) } +fstring[expr_ty]: star_expressions + +# type_expressions allow */** but ignore them +type_expressions[asdl_seq*]: + | a=','.expression+ ',' '*' b=expression ',' '**' c=expression { + _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_seq_append_to_end(p, a, b)), c) } + | a=','.expression+ ',' '*' b=expression { _PyPegen_seq_append_to_end(p, a, b) } + | a=','.expression+ ',' '**' b=expression { _PyPegen_seq_append_to_end(p, a, b) } + | '*' a=expression ',' '**' b=expression { + _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_singleton_seq(p, a)), b) } + | '*' a=expression { _PyPegen_singleton_seq(p, a) } + | '**' a=expression { _PyPegen_singleton_seq(p, a) } + | ','.expression+ + +statements[asdl_seq*]: a=statement+ { _PyPegen_seq_flatten(p, a) } +statement[asdl_seq*]: a=compound_stmt { _PyPegen_singleton_seq(p, a) } | simple_stmt +statement_newline[asdl_seq*]: + | a=compound_stmt NEWLINE { _PyPegen_singleton_seq(p, a) } + | simple_stmt + | NEWLINE { _PyPegen_singleton_seq(p, CHECK(_Py_Pass(EXTRA))) } + | ENDMARKER { _PyPegen_interactive_exit(p) } +simple_stmt[asdl_seq*]: + | a=small_stmt !';' NEWLINE { _PyPegen_singleton_seq(p, a) } # Not needed, there for speedup + | a=';'.small_stmt+ [';'] NEWLINE { a } +# NOTE: assignment MUST precede expression, else parsing a simple assignment +# will throw a SyntaxError. 
+small_stmt[stmt_ty] (memo): + | assignment + | e=star_expressions { _Py_Expr(e, EXTRA) } + | &'return' return_stmt + | &('import' | 'from') import_stmt + | &'raise' raise_stmt + | 'pass' { _Py_Pass(EXTRA) } + | &'del' del_stmt + | &'yield' yield_stmt + | &'assert' assert_stmt + | 'break' { _Py_Break(EXTRA) } + | 'continue' { _Py_Continue(EXTRA) } + | &'global' global_stmt + | &'nonlocal' nonlocal_stmt +compound_stmt[stmt_ty]: + | &('def' | '@' | ASYNC) function_def + | &'if' if_stmt + | &('class' | '@') class_def + | &('with' | ASYNC) with_stmt + | &('for' | ASYNC) for_stmt + | &'try' try_stmt + | &'while' while_stmt + +# NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield' +assignment[stmt_ty]: + | a=NAME ':' b=expression c=['=' d=annotated_rhs { d }] { + CHECK_VERSION( + 6, + "Variable annotation syntax is", + _Py_AnnAssign(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, c, 1, EXTRA) + ) } + | a=('(' b=single_target ')' { b } + | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { + CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) } + | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) !'=' tc=[TYPE_COMMENT] { + _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } + | a=single_target b=augassign ~ c=(yield_expr | star_expressions) { + _Py_AugAssign(a, b->kind, c, EXTRA) } + | invalid_assignment + +augassign[AugOperator*]: + | '+=' { _PyPegen_augoperator(p, Add) } + | '-=' { _PyPegen_augoperator(p, Sub) } + | '*=' { _PyPegen_augoperator(p, Mult) } + | '@=' { CHECK_VERSION(5, "The '@' operator is", _PyPegen_augoperator(p, MatMult)) } + | '/=' { _PyPegen_augoperator(p, Div) } + | '%=' { _PyPegen_augoperator(p, Mod) } + | '&=' { _PyPegen_augoperator(p, BitAnd) } + | '|=' { _PyPegen_augoperator(p, BitOr) } + | '^=' { _PyPegen_augoperator(p, BitXor) } + | '<<=' { _PyPegen_augoperator(p, LShift) } + | '>>=' { _PyPegen_augoperator(p, RShift) } + | '**=' { _PyPegen_augoperator(p, Pow) } + | '//=' { _PyPegen_augoperator(p, FloorDiv) } + +global_stmt[stmt_ty]: 'global' a=','.NAME+ { + _Py_Global(CHECK(_PyPegen_map_names_to_ids(p, a)), EXTRA) } +nonlocal_stmt[stmt_ty]: 'nonlocal' a=','.NAME+ { + _Py_Nonlocal(CHECK(_PyPegen_map_names_to_ids(p, a)), EXTRA) } + +yield_stmt[stmt_ty]: y=yield_expr { _Py_Expr(y, EXTRA) } + +assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _Py_Assert(a, b, EXTRA) } + +del_stmt[stmt_ty]: + | 'del' a=del_targets &(';' | NEWLINE) { _Py_Delete(a, EXTRA) } + | invalid_del_stmt + +import_stmt[stmt_ty]: import_name | import_from +import_name[stmt_ty]: 'import' a=dotted_as_names { _Py_Import(a, EXTRA) } +# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from[stmt_ty]: + | 'from' a=('.' | '...')* b=dotted_name 'import' c=import_from_targets { + _Py_ImportFrom(b->v.Name.id, c, _PyPegen_seq_count_dots(a), EXTRA) } + | 'from' a=('.' | '...')+ 'import' b=import_from_targets { + _Py_ImportFrom(NULL, b, _PyPegen_seq_count_dots(a), EXTRA) } +import_from_targets[asdl_seq*]: + | '(' a=import_from_as_names [','] ')' { a } + | import_from_as_names !',' + | '*' { _PyPegen_singleton_seq(p, CHECK(_PyPegen_alias_for_star(p))) } + | invalid_import_from_targets +import_from_as_names[asdl_seq*]: + | a=','.import_from_as_name+ { a } +import_from_as_name[alias_ty]: + | a=NAME b=['as' z=NAME { z }] { _Py_alias(a->v.Name.id, + (b) ? 
((expr_ty) b)->v.Name.id : NULL, + p->arena) } +dotted_as_names[asdl_seq*]: + | a=','.dotted_as_name+ { a } +dotted_as_name[alias_ty]: + | a=dotted_name b=['as' z=NAME { z }] { _Py_alias(a->v.Name.id, + (b) ? ((expr_ty) b)->v.Name.id : NULL, + p->arena) } +dotted_name[expr_ty]: + | a=dotted_name '.' b=NAME { _PyPegen_join_names_with_dot(p, a, b) } + | NAME + +if_stmt[stmt_ty]: + | 'if' a=named_expression ':' b=block c=elif_stmt { _Py_If(a, b, CHECK(_PyPegen_singleton_seq(p, c)), EXTRA) } + | 'if' a=named_expression ':' b=block c=[else_block] { _Py_If(a, b, c, EXTRA) } +elif_stmt[stmt_ty]: + | 'elif' a=named_expression ':' b=block c=elif_stmt { _Py_If(a, b, CHECK(_PyPegen_singleton_seq(p, c)), EXTRA) } + | 'elif' a=named_expression ':' b=block c=[else_block] { _Py_If(a, b, c, EXTRA) } +else_block[asdl_seq*]: 'else' ':' b=block { b } + +while_stmt[stmt_ty]: + | 'while' a=named_expression ':' b=block c=[else_block] { _Py_While(a, b, c, EXTRA) } + +for_stmt[stmt_ty]: + | 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { + _Py_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) } + | ASYNC 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { + CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) } + | invalid_for_target + +with_stmt[stmt_ty]: + | 'with' '(' a=','.with_item+ ','? ')' ':' b=block { + _Py_With(a, b, NULL, EXTRA) } + | 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { + _Py_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } + | ASYNC 'with' '(' a=','.with_item+ ','? ')' ':' b=block { + CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NULL, EXTRA)) } + | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { + CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) } +with_item[withitem_ty]: + | e=expression 'as' t=star_target &(',' | ')' | ':') { _Py_withitem(e, t, p->arena) } + | invalid_with_item + | e=expression { _Py_withitem(e, NULL, p->arena) } + +try_stmt[stmt_ty]: + | 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) } + | 'try' ':' b=block ex=except_block+ el=[else_block] f=[finally_block] { _Py_Try(b, ex, el, f, EXTRA) } +except_block[excepthandler_ty]: + | 'except' e=expression t=['as' z=NAME { z }] ':' b=block { + _Py_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) } + | 'except' ':' b=block { _Py_ExceptHandler(NULL, NULL, b, EXTRA) } +finally_block[asdl_seq*]: 'finally' ':' a=block { a } + +return_stmt[stmt_ty]: + | 'return' a=[star_expressions] { _Py_Return(a, EXTRA) } + +raise_stmt[stmt_ty]: + | 'raise' a=expression b=['from' z=expression { z }] { _Py_Raise(a, b, EXTRA) } + | 'raise' { _Py_Raise(NULL, NULL, EXTRA) } + +function_def[stmt_ty]: + | d=decorators f=function_def_raw { _PyPegen_function_def_decorators(p, d, f) } + | function_def_raw + +function_def_raw[stmt_ty]: + | 'def' n=NAME '(' params=[params] ')' a=['->' z=expression { z }] ':' tc=[func_type_comment] b=block { + _Py_FunctionDef(n->v.Name.id, + (params) ? params : CHECK(_PyPegen_empty_arguments(p)), + b, NULL, a, NEW_TYPE_COMMENT(p, tc), EXTRA) } + | ASYNC 'def' n=NAME '(' params=[params] ')' a=['->' z=expression { z }] ':' tc=[func_type_comment] b=block { + CHECK_VERSION( + 5, + "Async functions are", + _Py_AsyncFunctionDef(n->v.Name.id, + (params) ? 
params : CHECK(_PyPegen_empty_arguments(p)), + b, NULL, a, NEW_TYPE_COMMENT(p, tc), EXTRA) + ) } +func_type_comment[Token*]: + | NEWLINE t=TYPE_COMMENT &(NEWLINE INDENT) { t } # Must be followed by indented block + | invalid_double_type_comments + | TYPE_COMMENT + +params[arguments_ty]: + | invalid_parameters + | parameters + +parameters[arguments_ty]: + | a=slash_no_default b=param_no_default* c=param_with_default* d=[star_etc] { + _PyPegen_make_arguments(p, a, NULL, b, c, d) } + | a=slash_with_default b=param_with_default* c=[star_etc] { + _PyPegen_make_arguments(p, NULL, a, NULL, b, c) } + | a=param_no_default+ b=param_with_default* c=[star_etc] { + _PyPegen_make_arguments(p, NULL, NULL, a, b, c) } + | a=param_with_default+ b=[star_etc] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)} + | a=star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) } + +# Some duplication here because we can't write (',' | &')'), +# which is because we don't support empty alternatives (yet). +# +slash_no_default[asdl_seq*]: + | a=param_no_default+ '/' ',' { a } + | a=param_no_default+ '/' &')' { a } +slash_with_default[SlashWithDefault*]: + | a=param_no_default* b=param_with_default+ '/' ',' { _PyPegen_slash_with_default(p, a, b) } + | a=param_no_default* b=param_with_default+ '/' &')' { _PyPegen_slash_with_default(p, a, b) } + +star_etc[StarEtc*]: + | '*' a=param_no_default b=param_maybe_default* c=[kwds] { + _PyPegen_star_etc(p, a, b, c) } + | '*' ',' b=param_maybe_default+ c=[kwds] { + _PyPegen_star_etc(p, NULL, b, c) } + | a=kwds { _PyPegen_star_etc(p, NULL, NULL, a) } + | invalid_star_etc + +kwds[arg_ty]: '**' a=param_no_default { a } + +# One parameter. This *includes* a following comma and type comment. +# +# There are three styles: +# - No default +# - With default +# - Maybe with default +# +# There are two alternative forms of each, to deal with type comments: +# - Ends in a comma followed by an optional type comment +# - No comma, optional type comment, must be followed by close paren +# The latter form is for a final parameter without trailing comma. +# +param_no_default[arg_ty]: + | a=param ',' tc=TYPE_COMMENT? { _PyPegen_add_type_comment_to_arg(p, a, tc) } + | a=param tc=TYPE_COMMENT? &')' { _PyPegen_add_type_comment_to_arg(p, a, tc) } +param_with_default[NameDefaultPair*]: + | a=param c=default ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, a, c, tc) } + | a=param c=default tc=TYPE_COMMENT? &')' { _PyPegen_name_default_pair(p, a, c, tc) } +param_maybe_default[NameDefaultPair*]: + | a=param c=default? ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, a, c, tc) } + | a=param c=default? tc=TYPE_COMMENT? &')' { _PyPegen_name_default_pair(p, a, c, tc) } +param[arg_ty]: a=NAME b=annotation? { _Py_arg(a->v.Name.id, b, NULL, EXTRA) } + +annotation[expr_ty]: ':' a=expression { a } +default[expr_ty]: '=' a=expression { a } + +decorators[asdl_seq*]: a=('@' f=named_expression NEWLINE { f })+ { a } + +class_def[stmt_ty]: + | a=decorators b=class_def_raw { _PyPegen_class_def_decorators(p, a, b) } + | class_def_raw +class_def_raw[stmt_ty]: + | 'class' a=NAME b=['(' z=[arguments] ')' { z }] ':' c=block { + _Py_ClassDef(a->v.Name.id, + (b) ? ((expr_ty) b)->v.Call.args : NULL, + (b) ? 
((expr_ty) b)->v.Call.keywords : NULL, + c, NULL, EXTRA) } + +block[asdl_seq*] (memo): + | NEWLINE INDENT a=statements DEDENT { a } + | simple_stmt + | invalid_block + +star_expressions[expr_ty]: + | a=star_expression b=(',' c=star_expression { c })+ [','] { + _Py_Tuple(CHECK(_PyPegen_seq_insert_in_front(p, a, b)), Load, EXTRA) } + | a=star_expression ',' { _Py_Tuple(CHECK(_PyPegen_singleton_seq(p, a)), Load, EXTRA) } + | star_expression +star_expression[expr_ty] (memo): + | '*' a=bitwise_or { _Py_Starred(a, Load, EXTRA) } + | expression + +star_named_expressions[asdl_seq*]: a=','.star_named_expression+ [','] { a } +star_named_expression[expr_ty]: + | '*' a=bitwise_or { _Py_Starred(a, Load, EXTRA) } + | named_expression +named_expression[expr_ty]: + | a=NAME ':=' ~ b=expression { _Py_NamedExpr(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, EXTRA) } + | expression !':=' + | invalid_named_expression + +annotated_rhs[expr_ty]: yield_expr | star_expressions + +expressions[expr_ty]: + | a=expression b=(',' c=expression { c })+ [','] { + _Py_Tuple(CHECK(_PyPegen_seq_insert_in_front(p, a, b)), Load, EXTRA) } + | a=expression ',' { _Py_Tuple(CHECK(_PyPegen_singleton_seq(p, a)), Load, EXTRA) } + | expression +expression[expr_ty] (memo): + | a=disjunction 'if' b=disjunction 'else' c=expression { _Py_IfExp(b, a, c, EXTRA) } + | disjunction + | lambdef + +lambdef[expr_ty]: + | 'lambda' a=[lambda_params] ':' b=expression { _Py_Lambda((a) ? a : CHECK(_PyPegen_empty_arguments(p)), b, EXTRA) } + +lambda_params[arguments_ty]: + | invalid_lambda_parameters + | lambda_parameters + +# lambda_parameters etc. duplicates parameters but without annotations +# or type comments, and if there's no comma after a parameter, we expect +# a colon, not a close parenthesis. (For more, see parameters above.) 
+# +lambda_parameters[arguments_ty]: + | a=lambda_slash_no_default b=lambda_param_no_default* c=lambda_param_with_default* d=[lambda_star_etc] { + _PyPegen_make_arguments(p, a, NULL, b, c, d) } + | a=lambda_slash_with_default b=lambda_param_with_default* c=[lambda_star_etc] { + _PyPegen_make_arguments(p, NULL, a, NULL, b, c) } + | a=lambda_param_no_default+ b=lambda_param_with_default* c=[lambda_star_etc] { + _PyPegen_make_arguments(p, NULL, NULL, a, b, c) } + | a=lambda_param_with_default+ b=[lambda_star_etc] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)} + | a=lambda_star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) } + +lambda_slash_no_default[asdl_seq*]: + | a=lambda_param_no_default+ '/' ',' { a } + | a=lambda_param_no_default+ '/' &':' { a } +lambda_slash_with_default[SlashWithDefault*]: + | a=lambda_param_no_default* b=lambda_param_with_default+ '/' ',' { _PyPegen_slash_with_default(p, a, b) } + | a=lambda_param_no_default* b=lambda_param_with_default+ '/' &':' { _PyPegen_slash_with_default(p, a, b) } + +lambda_star_etc[StarEtc*]: + | '*' a=lambda_param_no_default b=lambda_param_maybe_default* c=[lambda_kwds] { + _PyPegen_star_etc(p, a, b, c) } + | '*' ',' b=lambda_param_maybe_default+ c=[lambda_kwds] { + _PyPegen_star_etc(p, NULL, b, c) } + | a=lambda_kwds { _PyPegen_star_etc(p, NULL, NULL, a) } + | invalid_lambda_star_etc + +lambda_kwds[arg_ty]: '**' a=lambda_param_no_default { a } + +lambda_param_no_default[arg_ty]: + | a=lambda_param ',' { a } + | a=lambda_param &':' { a } +lambda_param_with_default[NameDefaultPair*]: + | a=lambda_param c=default ',' { _PyPegen_name_default_pair(p, a, c, NULL) } + | a=lambda_param c=default &':' { _PyPegen_name_default_pair(p, a, c, NULL) } +lambda_param_maybe_default[NameDefaultPair*]: + | a=lambda_param c=default? ',' { _PyPegen_name_default_pair(p, a, c, NULL) } + | a=lambda_param c=default? &':' { _PyPegen_name_default_pair(p, a, c, NULL) } +lambda_param[arg_ty]: a=NAME { _Py_arg(a->v.Name.id, NULL, NULL, EXTRA) } + +disjunction[expr_ty] (memo): + | a=conjunction b=('or' c=conjunction { c })+ { _Py_BoolOp( + Or, + CHECK(_PyPegen_seq_insert_in_front(p, a, b)), + EXTRA) } + | conjunction +conjunction[expr_ty] (memo): + | a=inversion b=('and' c=inversion { c })+ { _Py_BoolOp( + And, + CHECK(_PyPegen_seq_insert_in_front(p, a, b)), + EXTRA) } + | inversion +inversion[expr_ty] (memo): + | 'not' a=inversion { _Py_UnaryOp(Not, a, EXTRA) } + | comparison +comparison[expr_ty]: + | a=bitwise_or b=compare_op_bitwise_or_pair+ { + _Py_Compare(a, CHECK(_PyPegen_get_cmpops(p, b)), CHECK(_PyPegen_get_exprs(p, b)), EXTRA) } + | bitwise_or +compare_op_bitwise_or_pair[CmpopExprPair*]: + | eq_bitwise_or + | noteq_bitwise_or + | lte_bitwise_or + | lt_bitwise_or + | gte_bitwise_or + | gt_bitwise_or + | notin_bitwise_or + | in_bitwise_or + | isnot_bitwise_or + | is_bitwise_or +eq_bitwise_or[CmpopExprPair*]: '==' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Eq, a) } +noteq_bitwise_or[CmpopExprPair*]: + | (tok='!=' { _PyPegen_check_barry_as_flufl(p, tok) ? 
NULL : tok}) a=bitwise_or {_PyPegen_cmpop_expr_pair(p, NotEq, a) } +lte_bitwise_or[CmpopExprPair*]: '<=' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, LtE, a) } +lt_bitwise_or[CmpopExprPair*]: '<' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Lt, a) } +gte_bitwise_or[CmpopExprPair*]: '>=' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, GtE, a) } +gt_bitwise_or[CmpopExprPair*]: '>' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Gt, a) } +notin_bitwise_or[CmpopExprPair*]: 'not' 'in' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, NotIn, a) } +in_bitwise_or[CmpopExprPair*]: 'in' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, In, a) } +isnot_bitwise_or[CmpopExprPair*]: 'is' 'not' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, IsNot, a) } +is_bitwise_or[CmpopExprPair*]: 'is' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Is, a) } + +bitwise_or[expr_ty]: + | a=bitwise_or '|' b=bitwise_xor { _Py_BinOp(a, BitOr, b, EXTRA) } + | bitwise_xor +bitwise_xor[expr_ty]: + | a=bitwise_xor '^' b=bitwise_and { _Py_BinOp(a, BitXor, b, EXTRA) } + | bitwise_and +bitwise_and[expr_ty]: + | a=bitwise_and '&' b=shift_expr { _Py_BinOp(a, BitAnd, b, EXTRA) } + | shift_expr +shift_expr[expr_ty]: + | a=shift_expr '<<' b=sum { _Py_BinOp(a, LShift, b, EXTRA) } + | a=shift_expr '>>' b=sum { _Py_BinOp(a, RShift, b, EXTRA) } + | sum + +sum[expr_ty]: + | a=sum '+' b=term { _Py_BinOp(a, Add, b, EXTRA) } + | a=sum '-' b=term { _Py_BinOp(a, Sub, b, EXTRA) } + | term +term[expr_ty]: + | a=term '*' b=factor { _Py_BinOp(a, Mult, b, EXTRA) } + | a=term '/' b=factor { _Py_BinOp(a, Div, b, EXTRA) } + | a=term '//' b=factor { _Py_BinOp(a, FloorDiv, b, EXTRA) } + | a=term '%' b=factor { _Py_BinOp(a, Mod, b, EXTRA) } + | a=term '@' b=factor { CHECK_VERSION(5, "The '@' operator is", _Py_BinOp(a, MatMult, b, EXTRA)) } + | factor +factor[expr_ty] (memo): + | '+' a=factor { _Py_UnaryOp(UAdd, a, EXTRA) } + | '-' a=factor { _Py_UnaryOp(USub, a, EXTRA) } + | '~' a=factor { _Py_UnaryOp(Invert, a, EXTRA) } + | power +power[expr_ty]: + | a=await_primary '**' b=factor { _Py_BinOp(a, Pow, b, EXTRA) } + | await_primary +await_primary[expr_ty] (memo): + | AWAIT a=primary { CHECK_VERSION(5, "Await expressions are", _Py_Await(a, EXTRA)) } + | primary +primary[expr_ty]: + | invalid_primary # must be before 'primay genexp' because of invalid_genexp + | a=primary '.' b=NAME { _Py_Attribute(a, b->v.Name.id, Load, EXTRA) } + | a=primary b=genexp { _Py_Call(a, CHECK(_PyPegen_singleton_seq(p, b)), NULL, EXTRA) } + | a=primary '(' b=[arguments] ')' { + _Py_Call(a, + (b) ? ((expr_ty) b)->v.Call.args : NULL, + (b) ? ((expr_ty) b)->v.Call.keywords : NULL, + EXTRA) } + | a=primary '[' b=slices ']' { _Py_Subscript(a, b, Load, EXTRA) } + | atom + +slices[expr_ty]: + | a=slice !',' { a } + | a=','.slice+ [','] { _Py_Tuple(a, Load, EXTRA) } +slice[expr_ty]: + | a=[expression] ':' b=[expression] c=[':' d=[expression] { d }] { _Py_Slice(a, b, c, EXTRA) } + | a=expression { a } +atom[expr_ty]: + | NAME + | 'True' { _Py_Constant(Py_True, NULL, EXTRA) } + | 'False' { _Py_Constant(Py_False, NULL, EXTRA) } + | 'None' { _Py_Constant(Py_None, NULL, EXTRA) } + | '__peg_parser__' { RAISE_SYNTAX_ERROR("You found it!") } + | &STRING strings + | NUMBER + | &'(' (tuple | group | genexp) + | &'[' (list | listcomp) + | &'{' (dict | set | dictcomp | setcomp) + | '...' 
{ _Py_Constant(Py_Ellipsis, NULL, EXTRA) } + +strings[expr_ty] (memo): a=STRING+ { _PyPegen_concatenate_strings(p, a) } +list[expr_ty]: + | '[' a=[star_named_expressions] ']' { _Py_List(a, Load, EXTRA) } +listcomp[expr_ty]: + | '[' a=named_expression ~ b=for_if_clauses ']' { _Py_ListComp(a, b, EXTRA) } + | invalid_comprehension +tuple[expr_ty]: + | '(' a=[y=star_named_expression ',' z=[star_named_expressions] { _PyPegen_seq_insert_in_front(p, y, z) } ] ')' { + _Py_Tuple(a, Load, EXTRA) } +group[expr_ty]: + | '(' a=(yield_expr | named_expression) ')' { a } + | invalid_group +genexp[expr_ty]: + | '(' a=named_expression ~ b=for_if_clauses ')' { _Py_GeneratorExp(a, b, EXTRA) } + | invalid_comprehension +set[expr_ty]: '{' a=star_named_expressions '}' { _Py_Set(a, EXTRA) } +setcomp[expr_ty]: + | '{' a=named_expression ~ b=for_if_clauses '}' { _Py_SetComp(a, b, EXTRA) } + | invalid_comprehension +dict[expr_ty]: + | '{' a=[double_starred_kvpairs] '}' { + _Py_Dict(CHECK(_PyPegen_get_keys(p, a)), CHECK(_PyPegen_get_values(p, a)), EXTRA) } +dictcomp[expr_ty]: + | '{' a=kvpair b=for_if_clauses '}' { _Py_DictComp(a->key, a->value, b, EXTRA) } + | invalid_dict_comprehension +double_starred_kvpairs[asdl_seq*]: a=','.double_starred_kvpair+ [','] { a } +double_starred_kvpair[KeyValuePair*]: + | '**' a=bitwise_or { _PyPegen_key_value_pair(p, NULL, a) } + | kvpair +kvpair[KeyValuePair*]: a=expression ':' b=expression { _PyPegen_key_value_pair(p, a, b) } +for_if_clauses[asdl_seq*]: + | for_if_clause+ +for_if_clause[comprehension_ty]: + | ASYNC 'for' a=star_targets 'in' ~ b=disjunction c=('if' z=disjunction { z })* { + CHECK_VERSION(6, "Async comprehensions are", _Py_comprehension(a, b, c, 1, p->arena)) } + | 'for' a=star_targets 'in' ~ b=disjunction c=('if' z=disjunction { z })* { + _Py_comprehension(a, b, c, 0, p->arena) } + | invalid_for_target + +yield_expr[expr_ty]: + | 'yield' 'from' a=expression { _Py_YieldFrom(a, EXTRA) } + | 'yield' a=[star_expressions] { _Py_Yield(a, EXTRA) } + +arguments[expr_ty] (memo): + | a=args [','] &')' { a } + | invalid_arguments +args[expr_ty]: + | a=','.(starred_expression | named_expression !'=')+ b=[',' k=kwargs {k}] { _PyPegen_collect_call_seqs(p, a, b, EXTRA) } + | a=kwargs { _Py_Call(_PyPegen_dummy_name(p), + CHECK_NULL_ALLOWED(_PyPegen_seq_extract_starred_exprs(p, a)), + CHECK_NULL_ALLOWED(_PyPegen_seq_delete_starred_exprs(p, a)), + EXTRA) } +kwargs[asdl_seq*]: + | a=','.kwarg_or_starred+ ',' b=','.kwarg_or_double_starred+ { _PyPegen_join_sequences(p, a, b) } + | ','.kwarg_or_starred+ + | ','.kwarg_or_double_starred+ +starred_expression[expr_ty]: + | '*' a=expression { _Py_Starred(a, Load, EXTRA) } +kwarg_or_starred[KeywordOrStarred*]: + | a=NAME '=' b=expression { + _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } + | a=starred_expression { _PyPegen_keyword_or_starred(p, a, 0) } + | invalid_kwarg +kwarg_or_double_starred[KeywordOrStarred*]: + | a=NAME '=' b=expression { + _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } + | '**' a=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(NULL, a, EXTRA)), 1) } + | invalid_kwarg + +# NOTE: star_targets may contain *bitwise_or, targets may not. 
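To illustrate the note above (this snippet is an aside, not part of the patch): star_targets, used for assignment and for-loop targets, may contain a starred element, whereas plain targets such as the operand of `del` may not:

```
# star_targets: a starred element is allowed in assignment targets.
first, *rest = [1, 2, 3]
print(first, rest)  # 1 [2, 3]

# targets (e.g. for del) may not contain a starred element; uncommenting
# the next line is a SyntaxError.
# del first, *rest
```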
+star_targets[expr_ty]: + | a=star_target !',' { a } + | a=star_target b=(',' c=star_target { c })* [','] { + _Py_Tuple(CHECK(_PyPegen_seq_insert_in_front(p, a, b)), Store, EXTRA) } +star_targets_list_seq[asdl_seq*]: a=','.star_target+ [','] { a } +star_targets_tuple_seq[asdl_seq*]: + | a=star_target b=(',' c=star_target { c })+ [','] { _PyPegen_seq_insert_in_front(p, a, b) } + | a=star_target ',' { _PyPegen_singleton_seq(p, a) } +star_target[expr_ty] (memo): + | '*' a=(!'*' star_target) { + _Py_Starred(CHECK(_PyPegen_set_expr_context(p, a, Store)), Store, EXTRA) } + | target_with_star_atom +target_with_star_atom[expr_ty] (memo): + | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) } + | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } + | star_atom +star_atom[expr_ty]: + | a=NAME { _PyPegen_set_expr_context(p, a, Store) } + | '(' a=target_with_star_atom ')' { _PyPegen_set_expr_context(p, a, Store) } + | '(' a=[star_targets_tuple_seq] ')' { _Py_Tuple(a, Store, EXTRA) } + | '[' a=[star_targets_list_seq] ']' { _Py_List(a, Store, EXTRA) } + +single_target[expr_ty]: + | single_subscript_attribute_target + | a=NAME { _PyPegen_set_expr_context(p, a, Store) } + | '(' a=single_target ')' { a } +single_subscript_attribute_target[expr_ty]: + | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) } + | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } + +del_targets[asdl_seq*]: a=','.del_target+ [','] { a } +del_target[expr_ty] (memo): + | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } + | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Del, EXTRA) } + | del_t_atom +del_t_atom[expr_ty]: + | a=NAME { _PyPegen_set_expr_context(p, a, Del) } + | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) } + | '(' a=[del_targets] ')' { _Py_Tuple(a, Del, EXTRA) } + | '[' a=[del_targets] ']' { _Py_List(a, Del, EXTRA) } + +targets[asdl_seq*]: a=','.target+ [','] { a } +target[expr_ty] (memo): + | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) } + | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } + | t_atom +t_primary[expr_ty]: + | a=t_primary '.' b=NAME &t_lookahead { _Py_Attribute(a, b->v.Name.id, Load, EXTRA) } + | a=t_primary '[' b=slices ']' &t_lookahead { _Py_Subscript(a, b, Load, EXTRA) } + | a=t_primary b=genexp &t_lookahead { _Py_Call(a, CHECK(_PyPegen_singleton_seq(p, b)), NULL, EXTRA) } + | a=t_primary '(' b=[arguments] ')' &t_lookahead { + _Py_Call(a, + (b) ? ((expr_ty) b)->v.Call.args : NULL, + (b) ? ((expr_ty) b)->v.Call.keywords : NULL, + EXTRA) } + | a=atom &t_lookahead { a } +t_lookahead: '(' | '[' | '.' 
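For reference (not part of the patch), the t_primary/t_lookahead rules cover assignment targets that end in an attribute access or subscript, possibly reached through a chain of further calls, attributes, or subscripts:

```
class Box:
    pass

box = Box()
boxes = {"first": Box()}

box.value = 1             # t_primary '.' NAME
boxes["first"].value = 2  # t_primary '[' slices ']' then '.' NAME
print(box.value, boxes["first"].value)
```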
+t_atom[expr_ty]: + | a=NAME { _PyPegen_set_expr_context(p, a, Store) } + | '(' a=target ')' { _PyPegen_set_expr_context(p, a, Store) } + | '(' b=[targets] ')' { _Py_Tuple(b, Store, EXTRA) } + | '[' b=[targets] ']' { _Py_List(b, Store, EXTRA) } + + +# From here on, there are rules for invalid syntax with specialised error messages +invalid_arguments: + | args ',' '*' { RAISE_SYNTAX_ERROR("iterable argument unpacking follows keyword argument unpacking") } + | a=expression for_if_clauses ',' [args | expression for_if_clauses] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } + | a=args for_if_clauses { _PyPegen_nonparen_genexp_in_call(p, a) } + | args ',' a=expression for_if_clauses { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } + | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } +invalid_kwarg: + | !(NAME '=') a=expression b='=' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "expression cannot contain assignment, perhaps you meant \"==\"?") } +invalid_named_expression: + | a=expression ':=' expression { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } +invalid_assignment: + | a=invalid_ann_assign_target ':' expression { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, + "only single target (not %s) can be annotated", + _PyPegen_get_expr_name(a) + )} + | a=star_named_expression ',' star_named_expressions* ':' expression { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=expression ':' expression { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } + | (star_targets '=')* a=star_expressions '=' { + RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) } + | (star_targets '=')* a=yield_expr '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "assignment to yield expression not possible") } + | a=star_expressions augassign (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, + "'%s' is an illegal expression for augmented assignment", + _PyPegen_get_expr_name(a) + )} +invalid_ann_assign_target[expr_ty]: + | list + | tuple + | '(' a=invalid_ann_assign_target ')' { a } +invalid_del_stmt: + | 'del' a=star_expressions { + RAISE_SYNTAX_ERROR_INVALID_TARGET(DEL_TARGETS, a) } +invalid_block: + | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } +invalid_primary: + | primary a='{' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "invalid syntax") } +invalid_comprehension: + | ('[' | '(' | '{') a=starred_expression for_if_clauses { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "iterable unpacking cannot be used in comprehension") } +invalid_dict_comprehension: + | '{' a='**' bitwise_or for_if_clauses '}' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "dict unpacking cannot be used in dict comprehension") } +invalid_parameters: + | param_no_default* (slash_with_default | param_with_default+) param_no_default { + RAISE_SYNTAX_ERROR("non-default argument follows default argument") } +invalid_lambda_parameters: + | lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default { + RAISE_SYNTAX_ERROR("non-default argument follows default argument") } +invalid_star_etc: + | '*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") } + | '*' ',' TYPE_COMMENT { RAISE_SYNTAX_ERROR("bare * has associated type comment") } +invalid_lambda_star_etc: + | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR("named 
arguments must follow bare *") } +invalid_double_type_comments: + | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT { + RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } +invalid_with_item: + | expression 'as' a=expression { + RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) } + +invalid_for_target: + | ASYNC? 'for' a=star_expressions { + RAISE_SYNTAX_ERROR_INVALID_TARGET(FOR_TARGETS, a) } + +invalid_group: + | '(' a=starred_expression ')' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "can't use starred expression here") } +invalid_import_from_targets: + | import_from_as_names ',' { + RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } \ No newline at end of file diff --git a/native/libcst/README.md b/native/libcst/README.md new file mode 100644 index 00000000..f33563b2 --- /dev/null +++ b/native/libcst/README.md @@ -0,0 +1,66 @@ +# libcst_native + +A very experimental native extension to speed up LibCST. This does not currently provide +much performance benefit and is therefore not recommended for general use. + +The extension is written in Rust using [PyO3](https://pyo3.rs/). + +This installs as a separate python package that LibCST looks for and will import if it's +available. + + +## Using with LibCST + +[Set up a rust development environment](https://www.rust-lang.org/tools/install). Using +`rustup` is recommended, but not necessary. Rust 1.45.0+ should work. + +Follow the instructions for setting up a virtualenv in the top-level README, then: + +``` +cd libcst_native +maturin develop # install libcst_native to the virtualenv +cd .. # cd back into the main project +python -m unittest +``` + +This will run the python test suite. Nothing special is required to use `libcst_native`, +since `libcst` will automatically use the native extension when it's installed. + +When benchmarking this code, make sure to run `maturin develop` with the `--release` +flag to enable compiler optimizations. + +You can disable the native extension by uninstalling the package from your virtualenv: + +``` +pip uninstall libcst_native +``` + + +## Rust Tests + +In addition to running the python test suite, you can run some tests written in rust +with + +``` +cargo test --no-default-features +``` + +The `--no-default-features` flag is needed to work around an incompatibility between tests +and pyo3's `extension-module` feature. + + +## Code Formatting + +Use `cargo fmt` to format your code. + + +## Release + +This isn't currently supported, so there are no releases available, but the end-goal would +be to publish this on PyPI. + +Because this is a native extension, it must be re-built for each platform/architecture. +The per-platform build could be automated using a CI system, [like github +actions][gh-actions].
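As a quick sanity check (an aside, not part of the README), after `maturin develop` you can confirm from Python that the package is importable and that LibCST still round-trips source; `find_spec` simply reports whether `libcst_native` is on the path:

```
import importlib.util
import libcst as cst

print("native extension available:",
      importlib.util.find_spec("libcst_native") is not None)
print(cst.parse_module("x = 1\n").code, end="")  # round-trips to "x = 1"
```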
+ +[gh-actions]: https://github.com/PyO3/maturin/blob/master/.github/workflows/release.yml diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs new file mode 100644 index 00000000..92f51883 --- /dev/null +++ b/native/libcst/benches/parser_benchmark.rs @@ -0,0 +1,105 @@ +use std::{ + path::{Component, PathBuf}, + time::Duration, +}; + +use criterion::{ + black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, Criterion, +}; +use criterion_cycles_per_byte::CyclesPerByte; +use itertools::Itertools; +use libcst_native::{ + parse_module, parse_tokens_without_whitespace, tokenize, Codegen, Config, Inflate, +}; + +fn load_all_fixtures() -> String { + let mut path = PathBuf::from(file!()); + path.pop(); + path.pop(); + path = path + .components() + .skip(1) + .chain( + vec!["tests".as_ref(), "fixtures".as_ref()] + .into_iter() + .map(Component::Normal), + ) + .collect(); + + path.read_dir() + .expect("read_dir") + .into_iter() + .map(|file| { + let path = file.unwrap().path(); + std::fs::read_to_string(&path).expect("reading_file") + }) + .join("\n") +} + +pub fn inflate_benchmarks(c: &mut Criterion) { + let fixture = load_all_fixtures(); + let tokens = tokenize(fixture.as_str()).expect("tokenize failed"); + let mut group = c.benchmark_group("inflate"); + group.bench_function("all", |b| { + b.iter_batched( + || { + let conf = Config::new(fixture.as_str(), &tokens); + let m = parse_tokens_without_whitespace(tokens.clone(), fixture.as_str(), None) + .expect("parse failed"); + (conf, m) + }, + |(conf, m)| black_box(m.inflate(&conf)), + BatchSize::SmallInput, + ) + }); + group.finish(); +} + +pub fn parser_benchmarks(c: &mut Criterion) { + let fixture = load_all_fixtures(); + let mut group = c.benchmark_group("parse"); + group.measurement_time(Duration::from_secs(15)); + group.bench_function("all", |b| { + b.iter_batched( + || tokenize(fixture.as_str()).expect("tokenize failed"), + |tokens| { + black_box(parse_tokens_without_whitespace( + tokens, + fixture.as_str(), + None, + )) + }, + BatchSize::SmallInput, + ) + }); + group.finish(); +} + +pub fn codegen_benchmarks(c: &mut Criterion) { + let input = load_all_fixtures(); + let m = parse_module(&input, None).expect("parse failed"); + let mut group = c.benchmark_group("codegen"); + group.bench_function("all", |b| { + b.iter(|| { + let mut state = Default::default(); + #[allow(clippy::unit_arg)] + black_box(m.codegen(&mut state)); + }) + }); + group.finish(); +} + +pub fn tokenize_benchmarks(c: &mut Criterion) { + let input = load_all_fixtures(); + let mut group = c.benchmark_group("tokenize"); + group.measurement_time(Duration::from_secs(15)); + group.bench_function("all", |b| b.iter(|| black_box(tokenize(input.as_str())))); + group.finish(); +} + +criterion_group!( + name=benches; + config = Criterion::default().with_measurement(CyclesPerByte); + targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks +); +criterion_main!(benches); diff --git a/native/libcst/src/bin.rs b/native/libcst/src/bin.rs new file mode 100644 index 00000000..234b9017 --- /dev/null +++ b/native/libcst/src/bin.rs @@ -0,0 +1,28 @@ +use libcst_native::*; +use std::{ + env, + io::{self, Read}, + process::exit, +}; + +pub fn main() { + let mut str = std::string::String::new(); + io::stdin().read_to_string(&mut str).unwrap(); + match parse_module(str.as_ref(), None) { + Err(e) => { + eprintln!("{}", prettify_error(e, "stdin")); + exit(1); + } + Ok(m) => { + let first_arg = 
env::args().nth(1).unwrap_or_else(|| "".to_string()); + if first_arg == "-d" { + println!("{:#?}", m); + } + if first_arg != "-n" { + let mut state = Default::default(); + m.codegen(&mut state); + print!("{}", state.to_string()); + } + } + }; +} diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs new file mode 100644 index 00000000..6c809768 --- /dev/null +++ b/native/libcst/src/lib.rs @@ -0,0 +1,167 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use std::cmp::{max, min}; + +mod tokenizer; + +pub use tokenizer::whitespace_parser::Config; +use tokenizer::{whitespace_parser, TokConfig, Token, TokenIterator}; + +mod nodes; +pub use nodes::*; + +mod parser; +use parser::{ParserError, Result}; + +pub mod py; + +pub fn tokenize(text: &str) -> Result> { + let iter = TokenIterator::new( + text, + &TokConfig { + async_hacks: false, + split_fstring: true, + }, + ); + + iter.collect::, _>>() + .map_err(|err| ParserError::TokenizerError(err, text)) +} + +pub fn parse_tokens_without_whitespace<'a>( + tokens: Vec>, + module_text: &'a str, + encoding: Option<&str>, +) -> Result<'a, Module<'a>> { + parser::python::file(&tokens.into(), module_text, encoding) + .map_err(|err| ParserError::ParserError(err, module_text)) +} + +pub fn parse_module<'a>( + mut module_text: &'a str, + encoding: Option<&str>, +) -> Result<'a, Module<'a>> { + // Strip UTF-8 BOM + if let Some(stripped) = module_text.strip_prefix('\u{feff}') { + module_text = stripped; + } + let tokens = tokenize(module_text)?; + let conf = whitespace_parser::Config::new(module_text, &tokens); + let m = parse_tokens_without_whitespace(tokens, module_text, encoding)?; + Ok(m.inflate(&conf)?) +} + +pub fn parse_statement(text: &str) -> Result { + let tokens = tokenize(text)?; + let conf = whitespace_parser::Config::new(text, &tokens); + let stm = parser::python::statement_input(&tokens.into(), text) + .map_err(|err| ParserError::ParserError(err, text))?; + Ok(stm.inflate(&conf)?) +} + +pub fn parse_expression(text: &str) -> Result { + let tokens = tokenize(text)?; + let conf = whitespace_parser::Config::new(text, &tokens); + let expr = parser::python::expression_input(&tokens.into(), text) + .map_err(|err| ParserError::ParserError(err, text))?; + Ok(expr.inflate(&conf)?) 
+} + +// n starts from 1 +fn bol_offset(source: &str, n: i32) -> usize { + if n <= 1 { + return 0; + } + source + .match_indices('\n') + .nth((n - 2) as usize) + .map(|(index, _)| index + 1) + .unwrap_or_else(|| source.len()) +} + +pub fn prettify_error(err: ParserError, label: &str) -> std::string::String { + match err { + ParserError::ParserError(e, module_text) => { + let loc = e.location; + let context = 1; + let start_offset = bol_offset(module_text, loc.start_pos.line as i32 - context); + let end_offset = bol_offset(module_text, loc.end_pos.line as i32 + context + 1); + let source = &module_text[start_offset..end_offset]; + let start = loc.start_pos.offset - start_offset; + let end = loc.end_pos.offset - start_offset; + chic::Error::new(label) + .error( + max( + 1, + loc.start_pos + .line + .checked_sub(context as usize) + .unwrap_or(1), + ), + start, + if start == end { + min(end + 1, end_offset - start_offset + 1) + } else { + end + }, + source, + format!( + "expected {} {} -> {}", + e.expected, loc.start_pos, loc.end_pos + ), + ) + .to_string() + } + e => format!("Parse error for {}: {}", label, e), + } +} + +#[cfg(test)] +mod test { + use super::*; + use tokenizer::TokError; + + #[test] + fn test_simple() { + let n = parse_module("1_", None); + assert_eq!( + n.err().unwrap(), + ParserError::TokenizerError(TokError::BadDecimal, "1_") + ); + } + + #[test] + fn test_bare_minimum_funcdef() { + parse_module("def f(): ...", None).expect("parse error"); + } + + #[test] + fn test_funcdef_params() { + parse_module("def g(a, b): ...", None).expect("parse error"); + } + + #[test] + fn bol_offset_first_line() { + assert_eq!(0, bol_offset("hello", 1)); + assert_eq!(0, bol_offset("hello", 0)); + assert_eq!(0, bol_offset("hello\nhello", 1)); + assert_eq!(0, bol_offset("hello\nhello", 0)); + } + + #[test] + fn bol_offset_second_line() { + assert_eq!(5, bol_offset("hello", 2)); + assert_eq!(6, bol_offset("hello\nhello", 2)); + assert_eq!(6, bol_offset("hello\nhello\nhello", 2)); + } + + #[test] + fn bol_offset_last_line() { + assert_eq!(5, bol_offset("hello", 3)); + assert_eq!(11, bol_offset("hello\nhello", 3)); + assert_eq!(12, bol_offset("hello\nhello\nhello", 3)); + } +} diff --git a/native/libcst/src/nodes/codegen.rs b/native/libcst/src/nodes/codegen.rs new file mode 100644 index 00000000..3b4f3e7a --- /dev/null +++ b/native/libcst/src/nodes/codegen.rs @@ -0,0 +1,65 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +use std::fmt; +#[derive(Debug)] +pub struct CodegenState<'a> { + pub tokens: String, + pub indent_tokens: Vec<&'a str>, + pub default_newline: &'a str, + pub default_indent: &'a str, +} + +impl<'a> CodegenState<'a> { + pub fn indent(&mut self, v: &'a str) { + self.indent_tokens.push(v); + } + pub fn dedent(&mut self) { + self.indent_tokens.pop(); + } + pub fn add_indent(&mut self) { + self.tokens.extend(self.indent_tokens.iter().cloned()); + } + pub fn add_token(&mut self, tok: &'a str) { + self.tokens.push_str(tok); + } +} + +impl<'a> fmt::Display for CodegenState<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.tokens) + } +} + +pub trait Codegen<'a> { + fn codegen(&self, state: &mut CodegenState<'a>); +} + +impl<'a, T> Codegen<'a> for Option +where + T: Codegen<'a>, +{ + fn codegen(&self, state: &mut CodegenState<'a>) { + if let Some(s) = &self { + s.codegen(state); + } + } +} + +#[cfg(windows)] +const LINE_ENDING: &str = "\r\n"; +#[cfg(not(windows))] +const LINE_ENDING: &str = "\n"; + +impl<'a> Default for CodegenState<'a> { + fn default() -> Self { + Self { + default_newline: LINE_ENDING, + default_indent: " ", + indent_tokens: Default::default(), + tokens: Default::default(), + } + } +} diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs new file mode 100644 index 00000000..cb70d572 --- /dev/null +++ b/native/libcst/src/nodes/expression.rs @@ -0,0 +1,2234 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use std::{mem::swap, rc::Rc}; + +use crate::{ + inflate_helpers::adjust_parameters_trailing_whitespace, + nodes::{ + traits::{Inflate, ParenthesizedNode, Result, WithComma}, + whitespace::ParenthesizableWhitespace, + Annotation, AssignEqual, AssignTargetExpression, BinaryOp, BooleanOp, Codegen, + CodegenState, Colon, Comma, CompOp, Dot, UnaryOp, + }, + tokenizer::{ + whitespace_parser::{parse_parenthesizable_whitespace, Config}, + Token, + }, +}; +use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode}; +use pyo3::{types::PyModule, IntoPy}; + +type TokenRef<'a> = Rc>; + +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +pub struct Parameters<'a> { + pub params: Vec>, + pub star_arg: Option>, + pub kwonly_params: Vec>, + pub star_kwarg: Option>, + pub posonly_params: Vec>, + pub posonly_ind: Option>, +} + +impl<'a> Parameters<'a> { + pub fn is_empty(&self) -> bool { + self.params.is_empty() + && self.star_arg.is_none() + && self.kwonly_params.is_empty() + && self.star_kwarg.is_none() + && self.posonly_params.is_empty() + && self.posonly_ind.is_none() + } +} + +impl<'a> Inflate<'a> for Parameters<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.posonly_params = self.posonly_params.inflate(config)?; + self.posonly_ind = self.posonly_ind.inflate(config)?; + self.params = self.params.inflate(config)?; + self.star_arg = self.star_arg.inflate(config)?; + self.kwonly_params = self.kwonly_params.inflate(config)?; + self.star_kwarg = self.star_kwarg.inflate(config)?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, IntoPy)] +pub enum StarArg<'a> { + Star(ParamStar<'a>), + Param(Box>), +} + +impl<'a> Codegen<'a> for Parameters<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + let params_after_kwonly = self.star_kwarg.is_some(); + let params_after_regular = 
!self.kwonly_params.is_empty() || params_after_kwonly; + let params_after_posonly = !self.params.is_empty() || params_after_regular; + let star_included = self.star_arg.is_some() || !self.kwonly_params.is_empty(); + + for p in &self.posonly_params { + p.codegen(state, None, true); + } + + match &self.posonly_ind { + Some(ind) => ind.codegen(state, params_after_posonly), + _ => { + if !self.posonly_params.is_empty() { + if params_after_posonly { + state.add_token("/, "); + } else { + state.add_token("/"); + } + } + } + } + + let param_size = self.params.len(); + for (i, p) in self.params.iter().enumerate() { + p.codegen(state, None, params_after_regular || i < param_size - 1); + } + + let kwonly_size = self.kwonly_params.len(); + match &self.star_arg { + None => { + if star_included { + state.add_token("*, ") + } + } + Some(StarArg::Param(p)) => p.codegen( + state, + Some("*"), + kwonly_size > 0 || self.star_kwarg.is_some(), + ), + Some(StarArg::Star(s)) => s.codegen(state), + } + + for (i, p) in self.kwonly_params.iter().enumerate() { + p.codegen(state, None, params_after_kwonly || i < kwonly_size - 1); + } + + if let Some(star) = &self.star_kwarg { + star.codegen(state, Some("**"), false) + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ParamSlash<'a> { + pub comma: Option>, +} + +impl<'a> ParamSlash<'a> { + fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { + state.add_token("/"); + match (&self.comma, default_comma) { + (Some(comma), _) => comma.codegen(state), + (None, true) => state.add_token(", "), + _ => {} + } + } +} + +impl<'a> Inflate<'a> for ParamSlash<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.comma = self.comma.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ParamStar<'a> { + pub comma: Comma<'a>, +} + +impl<'a> Codegen<'a> for ParamStar<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("*"); + self.comma.codegen(state); + } +} + +impl<'a> Inflate<'a> for ParamStar<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.comma = self.comma.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, Eq, PartialEq, Default, Clone, ParenthesizedNode, IntoPy)] +pub struct Name<'a> { + pub value: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for Name<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Name<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token(self.value); + }); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Param<'a> { + pub name: Name<'a>, + pub annotation: Option>, + pub equal: Option>, + pub default: Option>, + + pub comma: Option>, + + pub star: Option<&'a str>, + + pub whitespace_after_star: ParenthesizableWhitespace<'a>, + pub whitespace_after_param: ParenthesizableWhitespace<'a>, + + pub(crate) star_tok: Option>, +} + +impl<'a> Inflate<'a> for Param<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + // TODO: whitespace_after_param missing? 
+ self.name = self.name.inflate(config)?; + self.annotation = self.annotation.inflate(config)?; + self.equal = self.equal.inflate(config)?; + self.default = self.default.inflate(config)?; + self.comma = self.comma.inflate(config)?; + if let Some(star_tok) = self.star_tok.as_mut() { + self.whitespace_after_star = parse_parenthesizable_whitespace( + config, + &mut star_tok.whitespace_after.borrow_mut(), + )?; + } + Ok(self) + } +} + +impl<'a> Default for Param<'a> { + fn default() -> Self { + Self { + name: Default::default(), + annotation: None, + equal: None, + default: None, + comma: None, + star: Some(""), // Note: this preserves a quirk of the pure python parser + whitespace_after_param: Default::default(), + whitespace_after_star: Default::default(), + star_tok: None, + } + } +} + +impl<'a> Param<'a> { + fn codegen( + &self, + state: &mut CodegenState<'a>, + default_star: Option<&'a str>, + default_comma: bool, + ) { + match (self.star, default_star) { + (Some(star), _) => state.add_token(star), + (None, Some(star)) => state.add_token(star), + _ => {} + } + self.whitespace_after_star.codegen(state); + self.name.codegen(state); + + if let Some(ann) = &self.annotation { + ann.codegen(state, ":"); + } + + match (&self.equal, &self.default) { + (Some(equal), Some(def)) => { + equal.codegen(state); + def.codegen(state); + } + (None, Some(def)) => { + state.add_token(" = "); + def.codegen(state); + } + _ => {} + } + + match &self.comma { + Some(comma) => comma.codegen(state), + None if default_comma => state.add_token(", "), + _ => {} + } + + self.whitespace_after_param.codegen(state); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Arg<'a> { + pub value: Expression<'a>, + pub keyword: Option>, + pub equal: Option>, + pub comma: Option>, + pub star: &'a str, + pub whitespace_after_star: ParenthesizableWhitespace<'a>, + pub whitespace_after_arg: ParenthesizableWhitespace<'a>, + + pub(crate) star_tok: Option>, +} + +impl<'a> Inflate<'a> for Arg<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + if let Some(star_tok) = self.star_tok.as_mut() { + self.whitespace_after_star = parse_parenthesizable_whitespace( + config, + &mut star_tok.whitespace_after.borrow_mut(), + )?; + } + self.keyword = self.keyword.inflate(config)?; + self.equal = self.equal.inflate(config)?; + self.value = self.value.inflate(config)?; + self.comma = self.comma.inflate(config)?; + // whitespace_after_arg is handled in Call + Ok(self) + } +} + +impl<'a> Arg<'a> { + pub fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { + state.add_token(self.star); + self.whitespace_after_star.codegen(state); + if let Some(kw) = &self.keyword { + kw.codegen(state); + } + if let Some(eq) = &self.equal { + eq.codegen(state); + } else if self.keyword.is_some() { + state.add_token(" = "); + } + self.value.codegen(state); + + if let Some(comma) = &self.comma { + comma.codegen(state); + } else if default_comma { + state.add_token(", "); + } + + self.whitespace_after_arg.codegen(state); + } +} + +impl<'a> WithComma<'a> for Arg<'a> { + fn with_comma(self, c: Comma<'a>) -> Self { + Self { + comma: Some(c), + ..self + } + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct LeftParen<'a> { + /// Any space that appears directly after this left parenthesis. 
+ pub whitespace_after: ParenthesizableWhitespace<'a>, + + pub(crate) lpar_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for LeftParen<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("("); + self.whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for LeftParen<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.lpar_tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct RightParen<'a> { + /// Any space that appears directly before this right parenthesis. + pub whitespace_before: ParenthesizableWhitespace<'a>, + + pub(crate) rpar_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for RightParen<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token(")"); + } +} + +impl<'a> Inflate<'a> for RightParen<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.rpar_tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate, IntoPy)] +pub enum Expression<'a> { + Name(Name<'a>), + Ellipsis(Ellipsis<'a>), + Integer(Integer<'a>), + Float(Float<'a>), + Imaginary(Imaginary<'a>), + Comparison(Comparison<'a>), + UnaryOperation(UnaryOperation<'a>), + BinaryOperation(BinaryOperation<'a>), + BooleanOperation(BooleanOperation<'a>), + Attribute(Attribute<'a>), + Tuple(Tuple<'a>), + Call(Call<'a>), + GeneratorExp(GeneratorExp<'a>), + ListComp(ListComp<'a>), + SetComp(SetComp<'a>), + DictComp(DictComp<'a>), + List(List<'a>), + Set(Set<'a>), + Dict(Dict<'a>), + Subscript(Subscript<'a>), + StarredElement(StarredElement<'a>), + IfExp(IfExp<'a>), + Lambda(Lambda<'a>), + Yield(Yield<'a>), + Await(Await<'a>), + SimpleString(SimpleString<'a>), + ConcatenatedString(ConcatenatedString<'a>), + FormattedString(FormattedString<'a>), + NamedExpr(NamedExpr<'a>), +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Ellipsis<'a> { + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for Ellipsis<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token("..."); + }) + } +} +impl<'a> Inflate<'a> for Ellipsis<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Integer<'a> { + /// A string representation of the integer, such as ``"100000"`` or + /// ``"100_000"``. + pub value: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for Integer<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token(self.value); + }) + } +} + +impl<'a> Inflate<'a> for Integer<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Float<'a> { + /// A string representation of the floating point number, such as ```"0.05"``, + /// ``".050"``, or ``"5e-2"``. 
+ pub value: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for Float<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token(self.value); + }) + } +} + +impl<'a> Inflate<'a> for Float<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Imaginary<'a> { + /// A string representation of the complex number, such as ``"2j"`` + pub value: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for Imaginary<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token(self.value); + }) + } +} + +impl<'a> Inflate<'a> for Imaginary<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Comparison<'a> { + pub left: Box>, + pub comparisons: Vec>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for Comparison<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.left.codegen(state); + for comp in &self.comparisons { + comp.codegen(state); + } + }) + } +} +impl<'a> Inflate<'a> for Comparison<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.comparisons = self.comparisons.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct UnaryOperation<'a> { + pub operator: UnaryOp<'a>, + pub expression: Box>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for UnaryOperation<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.operator.codegen(state); + self.expression.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for UnaryOperation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.expression = self.expression.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct BinaryOperation<'a> { + pub left: Box>, + pub operator: BinaryOp<'a>, + pub right: Box>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for BinaryOperation<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.left.codegen(state); + self.operator.codegen(state); + self.right.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for BinaryOperation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.right = self.right.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct BooleanOperation<'a> { + pub left: Box>, + pub operator: BooleanOp<'a>, + pub right: Box>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for BooleanOperation<'a> { + fn codegen(&self, state: 
&mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.left.codegen(state); + self.operator.codegen(state); + self.right.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for BooleanOperation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.right = self.right.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Call<'a> { + pub func: Box>, + pub args: Vec>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_after_func: ParenthesizableWhitespace<'a>, + pub whitespace_before_args: ParenthesizableWhitespace<'a>, + + pub(crate) lpar_tok: TokenRef<'a>, + pub(crate) rpar_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Call<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.func = self.func.inflate(config)?; + self.whitespace_after_func = parse_parenthesizable_whitespace( + config, + &mut (*self.lpar_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_before_args = parse_parenthesizable_whitespace( + config, + &mut (*self.lpar_tok).whitespace_after.borrow_mut(), + )?; + self.args = self.args.inflate(config)?; + + if let Some(arg) = self.args.last_mut() { + if arg.comma.is_none() { + arg.whitespace_after_arg = parse_parenthesizable_whitespace( + config, + &mut (*self.rpar_tok).whitespace_before.borrow_mut(), + )?; + } + } + self.rpar = self.rpar.inflate(config)?; + + Ok(self) + } +} + +impl<'a> Codegen<'a> for Call<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.func.codegen(state); + self.whitespace_after_func.codegen(state); + state.add_token("("); + self.whitespace_before_args.codegen(state); + let arg_len = self.args.len(); + for (i, arg) in self.args.iter().enumerate() { + arg.codegen(state, i + 1 < arg_len); + } + state.add_token(")"); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Attribute<'a> { + pub value: Box>, + pub attr: Name<'a>, + pub dot: Dot<'a>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for Attribute<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.value = self.value.inflate(config)?; + self.dot = self.dot.inflate(config)?; + self.attr = self.attr.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Attribute<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.value.codegen(state); + self.dot.codegen(state); + self.attr.codegen(state); + }) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +pub enum NameOrAttribute<'a> { + N(Name<'a>), + A(Attribute<'a>), +} + +impl<'a> std::convert::From> for Expression<'a> { + fn from(x: NameOrAttribute<'a>) -> Self { + match x { + NameOrAttribute::N(n) => Self::Name(n), + NameOrAttribute::A(a) => Self::Attribute(a), + } + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct ComparisonTarget<'a> { + pub operator: CompOp<'a>, + pub comparator: Expression<'a>, +} + +impl<'a> Codegen<'a> for ComparisonTarget<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.operator.codegen(state); + self.comparator.codegen(state); + } +} + +impl<'a> 
Inflate<'a> for ComparisonTarget<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.operator = self.operator.inflate(config)?; + self.comparator = self.comparator.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct StarredElement<'a> { + pub value: Box>, + pub comma: Option>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_before_value: ParenthesizableWhitespace<'a>, + + pub(crate) star_tok: TokenRef<'a>, +} + +impl<'a> StarredElement<'a> { + pub fn inflate_element(mut self, config: &Config<'a>, is_last: bool) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.whitespace_before_value = parse_parenthesizable_whitespace( + config, + &mut (*self.star_tok).whitespace_after.borrow_mut(), + )?; + self.value = self.value.inflate(config)?; + self.comma = if is_last { + self.comma.map(|c| c.inflate_before(config)).transpose() + } else { + self.comma.inflate(config) + }?; + Ok(self) + } +} + +impl<'a> Inflate<'a> for StarredElement<'a> { + fn inflate(self, config: &Config<'a>) -> Result { + self.inflate_element(config, false) + } +} + +impl<'a> Codegen<'a> for StarredElement<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token("*"); + self.whitespace_before_value.codegen(state); + self.value.codegen(state); + }); + if let Some(comma) = &self.comma { + comma.codegen(state); + } + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Element<'a> { + Simple { + value: Expression<'a>, + comma: Option>, + }, + Starred(StarredElement<'a>), +} + +// TODO: this could be a derive helper attribute to override the python class name +impl<'a> IntoPy for Element<'a> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + Self::Starred(s) => s.into_py(py), + Self::Simple { value, comma } => { + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let kwargs = [ + Some(("value", value.into_py(py))), + comma.map(|x| ("comma", x.into_py(py))), + ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py); + libcst + .getattr("Element") + .expect("no Element found in libcst") + .call((), Some(kwargs)) + .expect("conversion failed") + .into() + } + } + } +} + +impl<'a> Element<'a> { + fn codegen( + &self, + state: &mut CodegenState<'a>, + default_comma: bool, + default_comma_whitespace: bool, + ) { + match self { + Self::Simple { value, comma } => { + value.codegen(state); + if let Some(comma) = comma { + comma.codegen(state) + } + } + Self::Starred(s) => s.codegen(state), + } + let maybe_comma = match self { + Self::Simple { comma, .. } => comma, + Self::Starred(s) => &s.comma, + }; + if maybe_comma.is_none() && default_comma { + state.add_token(if default_comma_whitespace { ", " } else { "," }); + } + } + + pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result { + Ok(match self { + Self::Starred(s) => Self::Starred(s.inflate_element(config, is_last)?), + Self::Simple { value, comma } => Self::Simple { + value: value.inflate(config)?, + comma: if is_last { + comma.map(|c| c.inflate_before(config)).transpose()? + } else { + comma.inflate(config)? + }, + }, + }) + } +} + +impl<'a> WithComma<'a> for Element<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + let comma = Some(comma); + match self { + Self::Simple { value, .. 
} => Self::Simple { comma, value }, + Self::Starred(s) => Self::Starred(StarredElement { comma, ..s }), + } + } +} +impl<'a> std::convert::From> for Element<'a> { + fn from(e: Expression<'a>) -> Self { + match e { + Expression::StarredElement(e) => Element::Starred(e), + value => Element::Simple { value, comma: None }, + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode, IntoPy)] +pub struct Tuple<'a> { + pub elements: Vec>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for Tuple<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result> { + self.lpar = self.lpar.inflate(config)?; + let len = self.elements.len(); + self.elements = self + .elements + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) + .collect::>>()?; + if !self.elements.is_empty() { + // rpar only has whitespace if elements is non empty + self.rpar = self.rpar.inflate(config)?; + } + Ok(self) + } +} + +impl<'a> Codegen<'a> for Tuple<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + let len = self.elements.len(); + if len == 1 { + self.elements.first().unwrap().codegen(state, true, false); + } else { + for (idx, el) in self.elements.iter().enumerate() { + el.codegen(state, idx < len - 1, true); + } + } + }); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct GeneratorExp<'a> { + pub elt: Box>, + pub for_in: Box>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for GeneratorExp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.elt.codegen(state); + self.for_in.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for GeneratorExp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.elt = self.elt.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct ListComp<'a> { + pub elt: Box>, + pub for_in: Box>, + pub lbracket: LeftSquareBracket<'a>, + pub rbracket: RightSquareBracket<'a>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for ListComp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbracket.codegen(state); + self.elt.codegen(state); + self.for_in.codegen(state); + self.rbracket.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for ListComp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbracket = self.lbracket.inflate(config)?; + self.elt = self.elt.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rbracket = self.rbracket.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct LeftSquareBracket<'a> { + pub whitespace_after: ParenthesizableWhitespace<'a>, + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for LeftSquareBracket<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("["); + self.whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for LeftSquareBracket<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, 
IntoPy)] +pub struct RightSquareBracket<'a> { + pub whitespace_before: ParenthesizableWhitespace<'a>, + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for RightSquareBracket<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token("]"); + } +} + +impl<'a> Inflate<'a> for RightSquareBracket<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct SetComp<'a> { + pub elt: Box>, + pub for_in: Box>, + pub lbrace: LeftCurlyBrace<'a>, + pub rbrace: RightCurlyBrace<'a>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for SetComp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; + self.elt = self.elt.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rbrace = self.rbrace.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for SetComp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbrace.codegen(state); + self.elt.codegen(state); + self.for_in.codegen(state); + self.rbrace.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct DictComp<'a> { + pub key: Box>, + pub value: Box>, + pub for_in: Box>, + pub lbrace: LeftCurlyBrace<'a>, + pub rbrace: RightCurlyBrace<'a>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_before_colon: ParenthesizableWhitespace<'a>, + pub whitespace_after_colon: ParenthesizableWhitespace<'a>, + + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for DictComp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; + self.key = self.key.inflate(config)?; + self.whitespace_before_colon = parse_parenthesizable_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after_colon = parse_parenthesizable_whitespace( + config, + &mut (*self.colon_tok).whitespace_after.borrow_mut(), + )?; + self.value = self.value.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rbrace = self.rbrace.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for DictComp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbrace.codegen(state); + self.key.codegen(state); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.whitespace_after_colon.codegen(state); + self.value.codegen(state); + self.for_in.codegen(state); + self.rbrace.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct LeftCurlyBrace<'a> { + pub whitespace_after: ParenthesizableWhitespace<'a>, + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for LeftCurlyBrace<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for LeftCurlyBrace<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("{"); + 
self.whitespace_after.codegen(state); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct RightCurlyBrace<'a> { + pub whitespace_before: ParenthesizableWhitespace<'a>, + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for RightCurlyBrace<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for RightCurlyBrace<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token("}"); + } +} + +impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct CompFor<'a> { + pub target: AssignTargetExpression<'a>, + pub iter: Expression<'a>, + pub ifs: Vec>, + pub inner_for_in: Option>>, + pub asynchronous: Option>, + pub whitespace_before: ParenthesizableWhitespace<'a>, + pub whitespace_after_for: ParenthesizableWhitespace<'a>, + pub whitespace_before_in: ParenthesizableWhitespace<'a>, + pub whitespace_after_in: ParenthesizableWhitespace<'a>, + + pub(crate) async_tok: Option>, + pub(crate) for_tok: TokenRef<'a>, + pub(crate) in_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for CompFor<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + if let Some(asynchronous) = &self.asynchronous { + asynchronous.codegen(state); + } + state.add_token("for"); + self.whitespace_after_for.codegen(state); + self.target.codegen(state); + self.whitespace_before_in.codegen(state); + state.add_token("in"); + self.whitespace_after_in.codegen(state); + self.iter.codegen(state); + for if_ in &self.ifs { + if_.codegen(state); + } + if let Some(inner) = &self.inner_for_in { + inner.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for CompFor<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.for_tok).whitespace_before.borrow_mut(), + )?; + if let (Some(asy_tok), Some(asy)) = (self.async_tok.as_mut(), self.asynchronous.as_mut()) { + // If there is an async keyword, the start of the CompFor expression is + // considered to be this keyword, so whitespace_before needs to adjust but + // Asynchronous will own the whitespace before the for token. 
+ asy.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut asy_tok.whitespace_before.borrow_mut(), + )?; + swap(&mut asy.whitespace_after, &mut self.whitespace_before); + } + self.whitespace_after_for = parse_parenthesizable_whitespace( + config, + &mut (*self.for_tok).whitespace_after.borrow_mut(), + )?; + self.target = self.target.inflate(config)?; + self.whitespace_before_in = parse_parenthesizable_whitespace( + config, + &mut (*self.in_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after_in = parse_parenthesizable_whitespace( + config, + &mut (*self.in_tok).whitespace_after.borrow_mut(), + )?; + self.iter = self.iter.inflate(config)?; + self.ifs = self.ifs.inflate(config)?; + self.inner_for_in = self.inner_for_in.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Asynchronous<'a> { + pub whitespace_after: ParenthesizableWhitespace<'a>, +} + +impl<'a> Codegen<'a> for Asynchronous<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("async"); + self.whitespace_after.codegen(state); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct CompIf<'a> { + pub test: Expression<'a>, + pub whitespace_before: ParenthesizableWhitespace<'a>, + pub whitespace_before_test: ParenthesizableWhitespace<'a>, + + pub(crate) if_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for CompIf<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token("if"); + self.whitespace_before_test.codegen(state); + self.test.codegen(state); + } +} + +impl<'a> Inflate<'a> for CompIf<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.if_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_before_test = parse_parenthesizable_whitespace( + config, + &mut (*self.if_tok).whitespace_after.borrow_mut(), + )?; + self.test = self.test.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct List<'a> { + pub elements: Vec>, + pub lbracket: LeftSquareBracket<'a>, + pub rbracket: RightSquareBracket<'a>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for List<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbracket = self.lbracket.inflate(config)?; + let len = self.elements.len(); + self.elements = self + .elements + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) + .collect::>()?; + if !self.elements.is_empty() { + // lbracket owns all the whitespace if there are no elements + self.rbracket = self.rbracket.inflate(config)?; + } + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for List<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbracket.codegen(state); + let len = self.elements.len(); + for (idx, el) in self.elements.iter().enumerate() { + el.codegen(state, idx < len - 1, true); + } + self.rbracket.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Set<'a> { + pub elements: Vec>, + pub lbrace: LeftCurlyBrace<'a>, + pub rbrace: RightCurlyBrace<'a>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for Set<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = 
self.lbrace.inflate(config)?; + let len = self.elements.len(); + self.elements = self + .elements + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) + .collect::>()?; + if !self.elements.is_empty() { + self.rbrace = self.rbrace.inflate(config)?; + } + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Set<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbrace.codegen(state); + let len = self.elements.len(); + for (idx, el) in self.elements.iter().enumerate() { + el.codegen(state, idx < len - 1, true); + } + self.rbrace.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Dict<'a> { + pub elements: Vec>, + pub lbrace: LeftCurlyBrace<'a>, + pub rbrace: RightCurlyBrace<'a>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for Dict<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; + let len = self.elements.len(); + self.elements = self + .elements + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) + .collect::>()?; + if !self.elements.is_empty() { + self.rbrace = self.rbrace.inflate(config)?; + } + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Dict<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbrace.codegen(state); + let len = self.elements.len(); + for (idx, el) in self.elements.iter().enumerate() { + el.codegen(state, idx < len - 1, true); + } + self.rbrace.codegen(state); + }) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum DictElement<'a> { + Simple { + key: Expression<'a>, + value: Expression<'a>, + comma: Option>, + whitespace_before_colon: ParenthesizableWhitespace<'a>, + whitespace_after_colon: ParenthesizableWhitespace<'a>, + colon_tok: TokenRef<'a>, + }, + Starred(StarredDictElement<'a>), +} + +// TODO: this could be a derive helper attribute to override the python class name +impl<'a> IntoPy for DictElement<'a> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + Self::Starred(s) => s.into_py(py), + Self::Simple { + key, + value, + comma, + whitespace_after_colon, + whitespace_before_colon, + .. + } => { + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let kwargs = [ + Some(("key", key.into_py(py))), + Some(("value", value.into_py(py))), + Some(( + "whitespace_before_colon", + whitespace_before_colon.into_py(py), + )), + Some(("whitespace_after_colon", whitespace_after_colon.into_py(py))), + comma.map(|x| ("comma", x.into_py(py))), + ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py); + libcst + .getattr("DictElement") + .expect("no Element found in libcst") + .call((), Some(kwargs)) + .expect("conversion failed") + .into() + } + } + } +} + +impl<'a> DictElement<'a> { + pub fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result { + Ok(match self { + Self::Starred(s) => Self::Starred(s.inflate_element(config, last_element)?), + Self::Simple { + key, + value, + comma, + colon_tok, + .. 
+ } => { + let whitespace_before_colon = parse_parenthesizable_whitespace( + config, + &mut colon_tok.whitespace_before.borrow_mut(), + )?; + let whitespace_after_colon = parse_parenthesizable_whitespace( + config, + &mut colon_tok.whitespace_after.borrow_mut(), + )?; + Self::Simple { + key: key.inflate(config)?, + whitespace_before_colon, + whitespace_after_colon, + value: value.inflate(config)?, + comma: if last_element { + comma.map(|c| c.inflate_before(config)).transpose() + } else { + comma.inflate(config) + }?, + colon_tok, + } + } + }) + } +} + +impl<'a> DictElement<'a> { + fn codegen( + &self, + state: &mut CodegenState<'a>, + default_comma: bool, + default_comma_whitespace: bool, + ) { + match self { + Self::Simple { + key, + value, + comma, + whitespace_before_colon, + whitespace_after_colon, + .. + } => { + key.codegen(state); + whitespace_before_colon.codegen(state); + state.add_token(":"); + whitespace_after_colon.codegen(state); + value.codegen(state); + if let Some(comma) = comma { + comma.codegen(state) + } + } + Self::Starred(s) => s.codegen(state), + } + let maybe_comma = match self { + Self::Simple { comma, .. } => comma, + Self::Starred(s) => &s.comma, + }; + if maybe_comma.is_none() && default_comma { + state.add_token(if default_comma_whitespace { ", " } else { "," }); + } + } +} + +impl<'a> WithComma<'a> for DictElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + let comma = Some(comma); + match self { + Self::Starred(s) => Self::Starred(StarredDictElement { comma, ..s }), + Self::Simple { + key, + value, + whitespace_before_colon, + whitespace_after_colon, + colon_tok, + .. + } => Self::Simple { + comma, + key, + value, + whitespace_after_colon, + whitespace_before_colon, + colon_tok, + }, + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct StarredDictElement<'a> { + pub value: Expression<'a>, + pub comma: Option>, + pub whitespace_before_value: ParenthesizableWhitespace<'a>, + + pub(crate) star_tok: TokenRef<'a>, +} + +impl<'a> StarredDictElement<'a> { + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.whitespace_before_value = parse_parenthesizable_whitespace( + config, + &mut (*self.star_tok).whitespace_after.borrow_mut(), + )?; + self.value = self.value.inflate(config)?; + self.comma = if last_element { + self.comma.map(|c| c.inflate_before(config)).transpose() + } else { + self.comma.inflate(config) + }?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for StarredDictElement<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("**"); + self.whitespace_before_value.codegen(state); + self.value.codegen(state); + if let Some(comma) = &self.comma { + comma.codegen(state); + } + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +pub enum BaseSlice<'a> { + Index(Index<'a>), + Slice(Slice<'a>), +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Index<'a> { + pub value: Expression<'a>, +} + +impl<'a> Inflate<'a> for Index<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.value = self.value.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Index<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.value.codegen(state); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Slice<'a> { + #[no_py_default] + pub lower: Option>, + #[no_py_default] + pub upper: Option>, + pub step: Option>, + pub first_colon: Colon<'a>, + pub second_colon: Option>, +} + 
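+// Illustrative mapping (editorial note, not part of the original patch): for the
+// subscript `x[1:2:3]`, `lower`, `upper` and `step` hold the expressions `1`, `2`
+// and `3`, `first_colon` is the colon between `1` and `2`, and `second_colon` is the
+// colon before `3`. In `x[::2]`, `lower` and `upper` are `None` but both colons are
+// still present.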
+impl<'a> Inflate<'a> for Slice<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lower = self.lower.inflate(config)?; + self.first_colon = self.first_colon.inflate(config)?; + self.upper = self.upper.inflate(config)?; + self.second_colon = self.second_colon.inflate(config)?; + self.step = self.step.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Slice<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + if let Some(lower) = &self.lower { + lower.codegen(state); + } + self.first_colon.codegen(state); + if let Some(upper) = &self.upper { + upper.codegen(state); + } + if let Some(second_colon) = &self.second_colon { + second_colon.codegen(state); + } else if self.step.is_some() { + state.add_token(";"); + } + if let Some(step) = &self.step { + step.codegen(state); + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct SubscriptElement<'a> { + pub slice: BaseSlice<'a>, + pub comma: Option>, +} + +impl<'a> Inflate<'a> for SubscriptElement<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.slice = self.slice.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for SubscriptElement<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.slice.codegen(state); + if let Some(comma) = &self.comma { + comma.codegen(state); + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Subscript<'a> { + pub value: Box>, + pub slice: Vec>, + pub lbracket: LeftSquareBracket<'a>, + pub rbracket: RightSquareBracket<'a>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_after_value: ParenthesizableWhitespace<'a>, + + pub(crate) lbracket_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Subscript<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.value = self.value.inflate(config)?; + self.whitespace_after_value = parse_parenthesizable_whitespace( + config, + &mut (*self.lbracket_tok).whitespace_before.borrow_mut(), + )?; + self.lbracket = self.lbracket.inflate(config)?; + self.slice = self.slice.inflate(config)?; + self.rbracket = self.rbracket.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Subscript<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.value.codegen(state); + self.whitespace_after_value.codegen(state); + self.lbracket.codegen(state); + let len = self.slice.len(); + for (i, slice) in self.slice.iter().enumerate() { + slice.codegen(state); + if slice.comma.is_none() && i + 1 < len { + state.add_token(", ") + } + } + self.rbracket.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct IfExp<'a> { + pub test: Box>, + pub body: Box>, + pub orelse: Box>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_before_if: ParenthesizableWhitespace<'a>, + pub whitespace_after_if: ParenthesizableWhitespace<'a>, + pub whitespace_before_else: ParenthesizableWhitespace<'a>, + pub whitespace_after_else: ParenthesizableWhitespace<'a>, + + pub(crate) if_tok: TokenRef<'a>, + pub(crate) else_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for IfExp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.body = self.body.inflate(config)?; + self.whitespace_before_if = parse_parenthesizable_whitespace( + config, + &mut (*self.if_tok).whitespace_before.borrow_mut(), + )?; + 
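+        // In `body if test else orelse`, the whitespace on either side of the `if`
+        // token separates it from `body` and `test`.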
self.whitespace_after_if = parse_parenthesizable_whitespace( + config, + &mut (*self.if_tok).whitespace_after.borrow_mut(), + )?; + self.test = self.test.inflate(config)?; + self.whitespace_before_else = parse_parenthesizable_whitespace( + config, + &mut (*self.else_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after_else = parse_parenthesizable_whitespace( + config, + &mut (*self.else_tok).whitespace_after.borrow_mut(), + )?; + self.orelse = self.orelse.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for IfExp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.body.codegen(state); + self.whitespace_before_if.codegen(state); + state.add_token("if"); + self.whitespace_after_if.codegen(state); + self.test.codegen(state); + self.whitespace_before_else.codegen(state); + state.add_token("else"); + self.whitespace_after_else.codegen(state); + self.orelse.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Lambda<'a> { + pub params: Box>, + pub body: Box>, + pub colon: Colon<'a>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_after_lambda: Option>, + + pub(crate) lambda_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Lambda<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + if !self.params.is_empty() { + self.whitespace_after_lambda = Some(parse_parenthesizable_whitespace( + config, + &mut (*self.lambda_tok).whitespace_after.borrow_mut(), + )?); + } + self.params = self.params.inflate(config)?; + adjust_parameters_trailing_whitespace(config, &mut self.params, &self.colon.tok)?; + self.colon = self.colon.inflate(config)?; + self.body = self.body.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Lambda<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token("lambda"); + if let Some(ws) = &self.whitespace_after_lambda { + ws.codegen(state); + } else if !self.params.is_empty() { + // there's one or more params, add a space + state.add_token(" ") + } + self.params.codegen(state); + self.colon.codegen(state); + self.body.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct From<'a> { + pub item: Expression<'a>, + pub whitespace_before_from: Option>, + pub whitespace_after_from: ParenthesizableWhitespace<'a>, + + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> From<'a> { + pub fn codegen(&self, state: &mut CodegenState<'a>, default_space: &'a str) { + if let Some(ws) = &self.whitespace_before_from { + ws.codegen(state); + } else { + state.add_token(default_space); + } + state.add_token("from"); + self.whitespace_after_from.codegen(state); + self.item.codegen(state); + } +} + +impl<'a> Inflate<'a> for From<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_from = Some(parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?); + self.whitespace_after_from = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + self.item = self.item.inflate(config)?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum YieldValue<'a> { + Expression(Expression<'a>), + From(From<'a>), +} + +impl<'a> Inflate<'a> for YieldValue<'a> { + fn inflate(self, config: 
&Config<'a>) -> Result { + Ok(match self { + Self::Expression(e) => Self::Expression(e.inflate(config)?), + Self::From(e) => { + let mut e = e.inflate(config)?; + e.whitespace_before_from = None; + Self::From(e) + } + }) + } +} + +impl<'a> YieldValue<'a> { + fn codegen(&self, state: &mut CodegenState<'a>, default_space: &'a str) { + match self { + Self::Expression(e) => e.codegen(state), + Self::From(f) => f.codegen(state, default_space), + } + } +} + +impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Yield<'a> { + pub value: Option>>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_after_yield: Option>, + + pub(crate) yield_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Yield<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + if self.value.is_some() { + self.whitespace_after_yield = Some(parse_parenthesizable_whitespace( + config, + &mut (*self.yield_tok).whitespace_after.borrow_mut(), + )?); + } + self.value = self.value.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Yield<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token("yield"); + if let Some(ws) = &self.whitespace_after_yield { + ws.codegen(state); + } else if self.value.is_some() { + state.add_token(" "); + } + + if let Some(val) = &self.value { + val.codegen(state, "") + } + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct Await<'a> { + pub expression: Box>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_after_await: ParenthesizableWhitespace<'a>, + + pub(crate) await_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Await<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.whitespace_after_await = parse_parenthesizable_whitespace( + config, + &mut (*self.await_tok).whitespace_after.borrow_mut(), + )?; + self.expression = self.expression.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Await<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token("await"); + self.whitespace_after_await.codegen(state); + self.expression.codegen(state); + }) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +pub enum String<'a> { + Simple(SimpleString<'a>), + Concatenated(ConcatenatedString<'a>), + Formatted(FormattedString<'a>), +} + +impl<'a> std::convert::From> for Expression<'a> { + fn from(s: String<'a>) -> Self { + match s { + String::Simple(s) => Self::SimpleString(s), + String::Concatenated(s) => Self::ConcatenatedString(s), + String::Formatted(s) => Self::FormattedString(s), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct ConcatenatedString<'a> { + pub left: Box>, + pub right: Box>, + pub lpar: Vec>, + pub rpar: Vec>, + pub whitespace_between: ParenthesizableWhitespace<'a>, + + // we capture the next token after each string piece so Inflate can extract the + // whitespace between individual pieces + pub(crate) right_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for ConcatenatedString<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = 
self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.whitespace_between = parse_parenthesizable_whitespace( + config, + &mut (*self.right_tok).whitespace_before.borrow_mut(), + )?; + self.right = self.right.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for ConcatenatedString<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.left.codegen(state); + self.whitespace_between.codegen(state); + self.right.codegen(state); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode, IntoPy)] +pub struct SimpleString<'a> { + /// The texual representation of the string, including quotes, prefix + /// characters, and any escape characters present in the original source code, + /// such as ``r"my string\n"``. + pub value: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for SimpleString<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for SimpleString<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| state.add_token(self.value)) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct FormattedStringText<'a> { + pub value: &'a str, +} + +impl<'a> Inflate<'a> for FormattedStringText<'a> { + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(self) + } +} + +impl<'a> Codegen<'a> for FormattedStringText<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token(self.value); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct FormattedStringExpression<'a> { + pub expression: Expression<'a>, + pub conversion: Option<&'a str>, + pub format_spec: Option>>, + pub whitespace_before_expression: ParenthesizableWhitespace<'a>, + pub whitespace_after_expression: ParenthesizableWhitespace<'a>, + pub equal: Option>, + + pub(crate) lbrace_tok: TokenRef<'a>, + // This is None if there's an equal sign, otherwise it's the first token of + // (conversion, format spec, right brace) in that order + pub(crate) after_expr_tok: Option>, +} + +impl<'a> Inflate<'a> for FormattedStringExpression<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_expression = parse_parenthesizable_whitespace( + config, + &mut (*self.lbrace_tok).whitespace_after.borrow_mut(), + )?; + self.expression = self.expression.inflate(config)?; + self.equal = self.equal.inflate(config)?; + if let Some(after_expr_tok) = self.after_expr_tok.as_mut() { + self.whitespace_after_expression = parse_parenthesizable_whitespace( + config, + &mut after_expr_tok.whitespace_before.borrow_mut(), + )?; + } + self.format_spec = self.format_spec.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for FormattedStringExpression<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("{"); + self.whitespace_before_expression.codegen(state); + self.expression.codegen(state); + if let Some(eq) = &self.equal { + eq.codegen(state); + } + self.whitespace_after_expression.codegen(state); + if let Some(conv) = &self.conversion { + state.add_token("!"); + state.add_token(conv); + } + if let Some(specs) = &self.format_spec { + state.add_token(":"); + for spec in specs { + spec.codegen(state); + } + } + state.add_token("}"); + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, 
Inflate, IntoPy)] +pub enum FormattedStringContent<'a> { + Text(FormattedStringText<'a>), + Expression(FormattedStringExpression<'a>), +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct FormattedString<'a> { + pub parts: Vec>, + pub start: &'a str, + pub end: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Inflate<'a> for FormattedString<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.parts = self.parts.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for FormattedString<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token(self.start); + for part in &self.parts { + part.codegen(state); + } + state.add_token(self.end); + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +pub struct NamedExpr<'a> { + pub target: Box>, + pub value: Box>, + pub lpar: Vec>, + pub rpar: Vec>, + + pub whitespace_before_walrus: ParenthesizableWhitespace<'a>, + pub whitespace_after_walrus: ParenthesizableWhitespace<'a>, + + pub(crate) walrus_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for NamedExpr<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.target.codegen(state); + self.whitespace_before_walrus.codegen(state); + state.add_token(":="); + self.whitespace_after_walrus.codegen(state); + self.value.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for NamedExpr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.target = self.target.inflate(config)?; + self.whitespace_before_walrus = parse_parenthesizable_whitespace( + config, + &mut self.walrus_tok.whitespace_before.borrow_mut(), + )?; + self.whitespace_after_walrus = parse_parenthesizable_whitespace( + config, + &mut self.walrus_tok.whitespace_after.borrow_mut(), + )?; + self.value = self.value.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} diff --git a/native/libcst/src/nodes/inflate_helpers.rs b/native/libcst/src/nodes/inflate_helpers.rs new file mode 100644 index 00000000..902ad032 --- /dev/null +++ b/native/libcst/src/nodes/inflate_helpers.rs @@ -0,0 +1,34 @@ +use crate::{ + nodes::traits::Result, + tokenizer::{ + whitespace_parser::{parse_parenthesizable_whitespace, Config}, + Token, + }, + Param, Parameters, StarArg, +}; + +pub(crate) fn adjust_parameters_trailing_whitespace<'a>( + config: &Config<'a>, + parameters: &mut Parameters<'a>, + next_tok: &Token<'a>, +) -> Result<()> { + let do_adjust = |param: &mut Param<'a>| -> Result<()> { + let whitespace_after = + parse_parenthesizable_whitespace(config, &mut next_tok.whitespace_before.borrow_mut())?; + if param.comma.is_none() { + param.whitespace_after_param = whitespace_after; + } + Ok(()) + }; + + if let Some(param) = &mut parameters.star_kwarg { + do_adjust(param)?; + } else if let Some(param) = parameters.kwonly_params.last_mut() { + do_adjust(param)?; + } else if let Some(StarArg::Param(param)) = parameters.star_arg.as_mut() { + do_adjust(param)?; + } else if let Some(param) = parameters.params.last_mut() { + do_adjust(param)?; + } + Ok(()) +} diff --git a/native/libcst/src/nodes/macros.rs b/native/libcst/src/nodes/macros.rs new file mode 100644 index 00000000..1c47e3fa --- /dev/null +++ b/native/libcst/src/nodes/macros.rs @@ -0,0 +1,33 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +/// Generates a function that lazily imports and caches a module's member. This will hold a +/// permanent reference to the imported member. Python's module cache is rarely purged though, so +/// it typically won't matter. +/// +/// This cache is cheaper than looking up the module in python's module cache inspecting the +/// module's `__dict__` each time you want access to the member. +/// +/// If you have multiple imports from the same module, we'll call `py.import` once for each member +/// of the module. +#[macro_export] +macro_rules! py_import { + ( $module_name:expr, $member_name:expr, $getter_fn:ident ) => { + paste::paste! { + static [] + : pyo3::once_cell::GILOnceCell> + = pyo3::once_cell::GILOnceCell::new(); + + fn $getter_fn<'py>(py: pyo3::Python<'py>) -> pyo3::PyResult<&'py pyo3::PyAny> { + Ok([].get_or_init(py, || { + Ok(py.import($module_name)?.get($member_name)?.to_object(py)) + }) + .as_ref() + .map_err(|err| err.clone_ref(py))? + .as_ref(py)) + } + } + }; +} diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs new file mode 100644 index 00000000..43981e87 --- /dev/null +++ b/native/libcst/src/nodes/mod.rs @@ -0,0 +1,43 @@ +mod whitespace; +pub use whitespace::{ + Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, + SimpleWhitespace, TrailingWhitespace, +}; +mod statement; +pub use statement::{ + AnnAssign, Annotation, AsName, Assert, Assign, AssignTarget, AssignTargetExpression, AugAssign, + Break, ClassDef, CompoundStatement, Continue, Decorator, Del, DelTargetExpression, Else, + ExceptHandler, Expr, Finally, For, FunctionDef, Global, If, Import, ImportAlias, ImportFrom, + ImportNames, IndentedBlock, NameItem, Nonlocal, OrElse, Pass, Raise, Return, + SimpleStatementLine, SimpleStatementSuite, SmallStatement, Statement, Suite, Try, While, With, + WithItem, +}; + +mod expression; +pub use expression::{ + Arg, Asynchronous, Attribute, Await, BaseSlice, BinaryOperation, BooleanOperation, Call, + CompFor, CompIf, Comparison, ComparisonTarget, ConcatenatedString, Dict, DictComp, DictElement, + Element, Ellipsis, Expression, Float, FormattedString, FormattedStringContent, + FormattedStringExpression, FormattedStringText, From, GeneratorExp, IfExp, Imaginary, Index, + Integer, Lambda, LeftCurlyBrace, LeftParen, LeftSquareBracket, List, ListComp, Name, + NameOrAttribute, NamedExpr, Param, ParamSlash, ParamStar, Parameters, RightCurlyBrace, + RightParen, RightSquareBracket, Set, SetComp, SimpleString, Slice, StarArg, StarredDictElement, + StarredElement, String, Subscript, SubscriptElement, Tuple, UnaryOperation, Yield, YieldValue, +}; + +mod op; +pub use op::{ + AssignEqual, AugOp, BinaryOp, BooleanOp, Colon, Comma, CompOp, Dot, ImportStar, Semicolon, + UnaryOp, +}; + +mod module; +pub use module::Module; + +mod codegen; +pub use codegen::{Codegen, CodegenState}; + +mod traits; +pub use traits::{Inflate, ParenthesizedNode, WithComma, WithLeadingLines}; + +pub(crate) mod inflate_helpers; diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs new file mode 100644 index 00000000..8040be64 --- /dev/null +++ b/native/libcst/src/nodes/module.rs @@ -0,0 +1,92 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
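+// Editorial note (illustrative, not part of the original patch): the `py_import!`
+// macro defined in macros.rs above would be used roughly as follows; the module and
+// member names here are hypothetical and chosen only for demonstration:
+//
+//     py_import!("libcst", "Name", get_name_class);
+//
+//     fn make_name(py: pyo3::Python) -> pyo3::PyResult<pyo3::PyObject> {
+//         // get_name_class lazily imports libcst.Name once and caches it for reuse
+//         Ok(get_name_class(py)?.call1(("x",))?.to_object(py))
+//     }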
+ +use std::mem::swap; +use std::rc::Rc; + +use crate::tokenizer::whitespace_parser::parse_empty_lines; +use crate::tokenizer::Token; +use crate::{ + nodes::{ + codegen::{Codegen, CodegenState}, + statement::Statement, + whitespace::EmptyLine, + }, + tokenizer::whitespace_parser::Config, +}; +use libcst_derive::IntoPy; + +use super::traits::{Inflate, Result, WithLeadingLines}; + +type TokenRef<'a> = Rc>; + +#[derive(Debug, Eq, PartialEq, IntoPy)] +pub struct Module<'a> { + pub body: Vec>, + pub header: Vec>, + pub footer: Vec>, + + pub default_indent: &'a str, + pub default_newline: &'a str, + pub has_trailing_newline: bool, + pub encoding: String, + + pub(crate) eof_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Module<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for h in &self.header { + h.codegen(state); + } + for s in &self.body { + s.codegen(state); + } + for nl in &self.footer { + nl.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for Module<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.default_indent = config.default_indent; + self.default_newline = config.default_newline; + self.has_trailing_newline = config.has_trailing_newline(); + self.body = self.body.inflate(config)?; + let mut footer = parse_empty_lines( + config, + &mut (*self.eof_tok).whitespace_before.borrow_mut(), + Some(""), + )?; + let mut header = vec![]; + if let Some(stmt) = self.body.first_mut() { + swap(stmt.leading_lines(), &mut header); + let mut last_indented = None; + for (num, line) in footer.iter().enumerate() { + if !line.whitespace.0.is_empty() { + last_indented = Some(num); + } else if line.comment.is_some() { + // This is a non-indented comment. Everything from here should belong in the + // footer. + break; + } + } + if let Some(num) = last_indented { + if num + 1 == footer.len() { + footer = vec![]; + } else { + let (_, rest) = footer.split_at(num + 1); + footer = rest.to_vec(); + } + } + } else { + swap(&mut header, &mut footer); + } + self.footer = footer; + self.header = header; + Ok(self) + } +} diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs new file mode 100644 index 00000000..ef09e0a0 --- /dev/null +++ b/native/libcst/src/nodes/op.rs @@ -0,0 +1,1420 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use std::rc::Rc; + +use super::{whitespace::ParenthesizableWhitespace, Codegen, CodegenState}; +use crate::{ + nodes::traits::{Inflate, Result}, + tokenizer::{ + whitespace_parser::{parse_parenthesizable_whitespace, parse_simple_whitespace, Config}, + Token, + }, +}; +use libcst_derive::IntoPy; + +type TokenRef<'a> = Rc>; + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct Semicolon<'a> { + /// Any space that appears directly before this semicolon. + pub whitespace_before: ParenthesizableWhitespace<'a>, + /// Any space that appears directly after this semicolon. 
+ pub whitespace_after: ParenthesizableWhitespace<'a>, + + #[skip_py] + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Semicolon<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token(";"); + self.whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for Semicolon<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = ParenthesizableWhitespace::SimpleWhitespace( + parse_simple_whitespace(config, &mut (*self.tok).whitespace_before.borrow_mut())?, + ); + self.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace( + parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?, + ); + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Comma<'a> { + /// Any space that appears directly before this comma. + pub whitespace_before: ParenthesizableWhitespace<'a>, + /// Any space that appears directly after this comma. + pub whitespace_after: ParenthesizableWhitespace<'a>, + + #[skip_py] + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Comma<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token(","); + self.whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for Comma<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +impl<'a> Comma<'a> { + pub fn inflate_before(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct AssignEqual<'a> { + /// Any space that appears directly before this equal sign. + pub whitespace_before: ParenthesizableWhitespace<'a>, + /// Any space that appears directly after this equal sign. + pub whitespace_after: ParenthesizableWhitespace<'a>, + + #[skip_py] + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for AssignEqual<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token("="); + self.whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for AssignEqual<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct Dot<'a> { + /// Any space that appears directly before this dot. + pub whitespace_before: ParenthesizableWhitespace<'a>, + /// Any space that appears directly after this dot. 
+ pub whitespace_after: ParenthesizableWhitespace<'a>, + + #[skip_py] + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Dot<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token("."); + self.whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for Dot<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.inflate_before(config)?; + self.inflate_after(config)?; + Ok(self) + } +} + +impl<'a> Dot<'a> { + fn inflate_before(&mut self, config: &Config<'a>) -> Result<()> { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + Ok(()) + } + + fn inflate_after(&mut self, config: &Config<'a>) -> Result<()> { + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(()) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ImportStar {} + +impl<'a> Codegen<'a> for ImportStar { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("*"); + } +} + +impl<'a> Inflate<'a> for ImportStar { + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum UnaryOp<'a> { + Plus { + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Minus { + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + BitInvert { + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Not { + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, +} + +impl<'a> Codegen<'a> for UnaryOp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + let (tok, whitespace_after) = match self { + Self::Plus { + whitespace_after, .. + } => ("+", whitespace_after), + Self::Minus { + whitespace_after, .. + } => ("-", whitespace_after), + Self::BitInvert { + whitespace_after, .. + } => ("~", whitespace_after), + Self::Not { + whitespace_after, .. + } => ("not", whitespace_after), + }; + state.add_token(tok); + whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for UnaryOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { + Ok(match self { + Self::Plus { tok, .. } => { + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Plus { + whitespace_after, + tok, + } + } + Self::Minus { tok, .. } => { + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Minus { + whitespace_after, + tok, + } + } + Self::BitInvert { tok, .. } => { + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitInvert { + whitespace_after, + tok, + } + } + Self::Not { tok, .. 
} => { + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Not { + whitespace_after, + tok, + } + } + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum BooleanOp<'a> { + And { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Or { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, +} + +impl<'a> Codegen<'a> for BooleanOp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + let (tok, ws_bef, ws_aft) = match self { + Self::And { + whitespace_after, + whitespace_before, + .. + } => ("and", whitespace_before, whitespace_after), + Self::Or { + whitespace_after, + whitespace_before, + .. + } => ("or", whitespace_before, whitespace_after), + }; + ws_bef.codegen(state); + state.add_token(tok); + ws_aft.codegen(state); + } +} + +impl<'a> Inflate<'a> for BooleanOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { + Ok(match self { + Self::And { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::And { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Or { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Or { + whitespace_before, + whitespace_after, + tok, + } + } + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum BinaryOp<'a> { + Add { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Subtract { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Multiply { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Divide { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + FloorDivide { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Modulo { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Power { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + LeftShift { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + RightShift { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + BitOr { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + BitAnd { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + 
#[skip_py] + tok: TokenRef<'a>, + }, + BitXor { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + MatrixMultiply { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, +} + +impl<'a> Codegen<'a> for BinaryOp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + let (whitespace_before, whitespace_after, tok) = match self { + Self::Add { + whitespace_before, + whitespace_after, + tok, + } + | Self::Subtract { + whitespace_before, + whitespace_after, + tok, + } + | Self::Multiply { + whitespace_before, + whitespace_after, + tok, + } + | Self::Divide { + whitespace_before, + whitespace_after, + tok, + } + | Self::FloorDivide { + whitespace_before, + whitespace_after, + tok, + } + | Self::Modulo { + whitespace_before, + whitespace_after, + tok, + } + | Self::Power { + whitespace_before, + whitespace_after, + tok, + } + | Self::LeftShift { + whitespace_before, + whitespace_after, + tok, + } + | Self::RightShift { + whitespace_before, + whitespace_after, + tok, + } + | Self::BitOr { + whitespace_before, + whitespace_after, + tok, + } + | Self::BitAnd { + whitespace_before, + whitespace_after, + tok, + } + | Self::BitXor { + whitespace_before, + whitespace_after, + tok, + } + | Self::MatrixMultiply { + whitespace_before, + whitespace_after, + tok, + } => (whitespace_before, whitespace_after, tok), + }; + whitespace_before.codegen(state); + state.add_token(tok.string); + whitespace_after.codegen(state); + } +} + +impl<'a> Inflate<'a> for BinaryOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { + Ok(match self { + Self::Add { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Add { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Subtract { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Subtract { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Multiply { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Multiply { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Divide { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Divide { + whitespace_before, + whitespace_after, + tok, + } + } + Self::FloorDivide { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::FloorDivide { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Modulo { tok, .. 
} => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Modulo { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Power { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Power { + whitespace_before, + whitespace_after, + tok, + } + } + Self::LeftShift { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::LeftShift { + whitespace_before, + whitespace_after, + tok, + } + } + Self::RightShift { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::RightShift { + whitespace_before, + whitespace_after, + tok, + } + } + Self::BitOr { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitOr { + whitespace_before, + whitespace_after, + tok, + } + } + Self::BitAnd { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitAnd { + whitespace_before, + whitespace_after, + tok, + } + } + Self::BitXor { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitXor { + whitespace_before, + whitespace_after, + tok, + } + } + Self::MatrixMultiply { tok, .. 
} => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::MatrixMultiply { + whitespace_before, + whitespace_after, + tok, + } + } + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum CompOp<'a> { + LessThan { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + GreaterThan { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + LessThanEqual { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + GreaterThanEqual { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + Equal { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + NotEqual { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + In { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + NotIn { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_between: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + not_tok: TokenRef<'a>, + #[skip_py] + in_tok: TokenRef<'a>, + }, + Is { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + IsNot { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_between: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + is_tok: TokenRef<'a>, + #[skip_py] + not_tok: TokenRef<'a>, + }, +} + +impl<'a> Codegen<'a> for CompOp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + let (bef, aft, first_tok, between) = match self { + Self::LessThan { + whitespace_before, + whitespace_after, + tok, + } + | Self::GreaterThan { + whitespace_before, + whitespace_after, + tok, + } + | Self::LessThanEqual { + whitespace_before, + whitespace_after, + tok, + } + | Self::GreaterThanEqual { + whitespace_before, + whitespace_after, + tok, + } + | Self::Equal { + whitespace_before, + whitespace_after, + tok, + } + | Self::NotEqual { + whitespace_before, + whitespace_after, + tok, + } + | Self::In { + whitespace_before, + whitespace_after, + tok, + } + | Self::Is { + whitespace_before, + whitespace_after, + tok, + } => (whitespace_before, whitespace_after, tok, None), + Self::IsNot { + whitespace_before, + whitespace_between, + whitespace_after, + is_tok, + not_tok, + } => ( + whitespace_before, + whitespace_after, + is_tok, + Some((whitespace_between, not_tok)), + ), + Self::NotIn { + whitespace_before, + whitespace_between, + whitespace_after, + not_tok, + in_tok, + } => ( + whitespace_before, + whitespace_after, + not_tok, + Some((whitespace_between, in_tok)), + ), + }; + bef.codegen(state); + state.add_token(first_tok.string); + if let Some((btw, second_tok)) = between { + btw.codegen(state); + state.add_token(second_tok.string); + } + aft.codegen(state); + } +} + +impl<'a> 
Inflate<'a> for CompOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { + Ok(match self { + Self::LessThan { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::LessThan { + whitespace_before, + whitespace_after, + tok, + } + } + Self::GreaterThan { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::GreaterThan { + whitespace_before, + whitespace_after, + tok, + } + } + Self::LessThanEqual { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::LessThanEqual { + whitespace_before, + whitespace_after, + tok, + } + } + Self::GreaterThanEqual { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::GreaterThanEqual { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Equal { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Equal { + whitespace_before, + whitespace_after, + tok, + } + } + Self::NotEqual { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::NotEqual { + whitespace_before, + whitespace_after, + tok, + } + } + Self::In { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::In { + whitespace_before, + whitespace_after, + tok, + } + } + Self::Is { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::Is { + whitespace_before, + whitespace_after, + tok, + } + } + Self::IsNot { + is_tok, not_tok, .. + } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*is_tok).whitespace_before.borrow_mut(), + )?; + let whitespace_between = parse_parenthesizable_whitespace( + config, + &mut (*is_tok).whitespace_after.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*not_tok).whitespace_after.borrow_mut(), + )?; + Self::IsNot { + whitespace_before, + whitespace_between, + whitespace_after, + is_tok, + not_tok, + } + } + Self::NotIn { + not_tok, in_tok, .. 
+ } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*not_tok).whitespace_before.borrow_mut(), + )?; + let whitespace_between = parse_parenthesizable_whitespace( + config, + &mut (*not_tok).whitespace_after.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*in_tok).whitespace_after.borrow_mut(), + )?; + Self::NotIn { + whitespace_before, + whitespace_between, + whitespace_after, + not_tok, + in_tok, + } + } + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Colon<'a> { + pub whitespace_before: ParenthesizableWhitespace<'a>, + pub whitespace_after: ParenthesizableWhitespace<'a>, + + #[skip_py] + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Colon<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Colon<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token(":"); + self.whitespace_after.codegen(state); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum AugOp<'a> { + AddAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + SubtractAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + MultiplyAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + MatrixMultiplyAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + DivideAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + ModuloAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + BitAndAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + BitOrAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + BitXorAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + LeftShiftAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + RightShiftAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + PowerAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, + FloorDivideAssign { + whitespace_before: ParenthesizableWhitespace<'a>, + whitespace_after: ParenthesizableWhitespace<'a>, + #[skip_py] + tok: TokenRef<'a>, + }, +} + +impl<'a> Inflate<'a> for AugOp<'a> { + fn inflate(self, config: &Config<'a>) -> 
Result { + Ok(match self { + Self::AddAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::AddAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::SubtractAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::SubtractAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::MultiplyAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::MultiplyAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::MatrixMultiplyAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::MatrixMultiplyAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::DivideAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::DivideAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::ModuloAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::ModuloAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::BitAndAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitAndAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::BitOrAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitOrAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::BitXorAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::BitXorAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::LeftShiftAssign { tok, .. 
} => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::LeftShiftAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::RightShiftAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::RightShiftAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::PowerAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::PowerAssign { + whitespace_before, + whitespace_after, + tok, + } + } + Self::FloorDivideAssign { tok, .. } => { + let whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_before.borrow_mut(), + )?; + let whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*tok).whitespace_after.borrow_mut(), + )?; + Self::FloorDivideAssign { + whitespace_before, + whitespace_after, + tok, + } + } + }) + } +} + +impl<'a> Codegen<'a> for AugOp<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + let (tok, bef, aft) = match self { + Self::AddAssign { + whitespace_before, + whitespace_after, + .. + } => ("+=", whitespace_before, whitespace_after), + Self::SubtractAssign { + whitespace_before, + whitespace_after, + .. + } => ("-=", whitespace_before, whitespace_after), + Self::MultiplyAssign { + whitespace_before, + whitespace_after, + .. + } => ("*=", whitespace_before, whitespace_after), + Self::MatrixMultiplyAssign { + whitespace_before, + whitespace_after, + .. + } => ("@=", whitespace_before, whitespace_after), + Self::DivideAssign { + whitespace_before, + whitespace_after, + .. + } => ("/=", whitespace_before, whitespace_after), + Self::ModuloAssign { + whitespace_before, + whitespace_after, + .. + } => ("%=", whitespace_before, whitespace_after), + Self::BitAndAssign { + whitespace_before, + whitespace_after, + .. + } => ("&=", whitespace_before, whitespace_after), + Self::BitOrAssign { + whitespace_before, + whitespace_after, + .. + } => ("|=", whitespace_before, whitespace_after), + Self::BitXorAssign { + whitespace_before, + whitespace_after, + .. + } => ("^=", whitespace_before, whitespace_after), + Self::LeftShiftAssign { + whitespace_before, + whitespace_after, + .. + } => ("<<=", whitespace_before, whitespace_after), + Self::RightShiftAssign { + whitespace_before, + whitespace_after, + .. + } => (">>=", whitespace_before, whitespace_after), + Self::PowerAssign { + whitespace_before, + whitespace_after, + .. + } => ("**=", whitespace_before, whitespace_after), + Self::FloorDivideAssign { + whitespace_before, + whitespace_after, + .. + } => ("//=", whitespace_before, whitespace_after), + }; + bef.codegen(state); + state.add_token(tok); + aft.codegen(state); + } +} diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs new file mode 100644 index 00000000..3b85eae2 --- /dev/null +++ b/native/libcst/src/nodes/parser_config.rs @@ -0,0 +1,137 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
+//
+// This source code is licensed under the MIT license found in the
+// LICENSE file in the root directory of this source tree.
+
+use pyo3::exceptions::PyIndexError;
+use pyo3::prelude::*;
+use pyo3::types::{IntoPyDict, PyDict, PySequence, PyString};
+use pyo3::wrap_pyfunction;
+
+use crate::py_cached::PyCached;
+
+#[pyclass(subclass, module = "libcst_native.parser_config")]
+#[text_signature = "(*, lines, default_newline)"]
+pub struct BaseWhitespaceParserConfig {
+    pub lines: PyCached<Vec<String>>,
+    pub default_newline: PyCached<String>,
+}
+
+#[pymethods]
+impl BaseWhitespaceParserConfig {
+    #[new]
+    fn new(lines: &PySequence, default_newline: &PyString) -> PyResult<Self> {
+        // These fields will get initialized when ParserConfig.__init__ (our subclass) runs
+        Ok(Self {
+            lines: lines.extract()?,
+            default_newline: default_newline.extract()?,
+        })
+    }
+
+    #[getter]
+    fn get_lines(&self, py: Python) -> PyObject {
+        self.lines.to_object(py)
+    }
+
+    #[getter]
+    fn get_default_newline(&self, py: Python) -> PyObject {
+        self.default_newline.to_object(py)
+    }
+}
+
+impl BaseWhitespaceParserConfig {
+    /// Equivalent to `config.lines.unwrap()[line_number - 1]`, but it returns a PyErr when we
+    /// get an index that's out of range, instead of panicking.
+    pub fn get_line(&self, line_number: usize) -> PyResult<&str> {
+        let err_fn =
+            || PyIndexError::new_err(format!("line number of {} is out of range", line_number));
+        self.lines
+            .get(line_number.checked_sub(1).ok_or_else(err_fn)?)
+            .map(|l| &l[..])
+            .ok_or_else(err_fn)
+    }
+
+    /// Equivalent to `config.get_line(line_number)[column_index..]`, but it returns a PyErr when
+    /// we get a column index that's out of range, instead of panicking.
+    pub fn get_line_after_column(&self, line_number: usize, column_index: usize) -> PyResult<&str> {
+        self.get_line(line_number)?
+            .get(column_index..)
+            .ok_or_else(|| {
+                PyIndexError::new_err(format!("column index of {} is out of range", column_index))
+            })
+    }
+}
+
+// These fields are private and PyObject, since we don't currently care about using them from
+// within rust.
+#[pyclass(extends=BaseWhitespaceParserConfig, module="libcst_native.parser_config")]
+#[text_signature = "(*, lines, encoding, default_indent, default_newline, has_trailing_newline, version, future_imports)"]
+pub struct ParserConfig {
+    // lines is inherited
+    #[pyo3(get)]
+    encoding: PyObject,
+    #[pyo3(get)]
+    default_indent: PyObject,
+    // default_newline is inherited
+    #[pyo3(get)]
+    has_trailing_newline: PyObject,
+    #[pyo3(get)]
+    version: PyObject,
+    #[pyo3(get)]
+    future_imports: PyObject,
+}
+
+#[pymethods]
+impl ParserConfig {
+    #[new]
+    fn new(
+        lines: &PySequence,
+        encoding: PyObject,
+        default_indent: PyObject,
+        default_newline: &PyString,
+        has_trailing_newline: PyObject,
+        version: PyObject,
+        future_imports: PyObject,
+    ) -> PyResult<(Self, BaseWhitespaceParserConfig)> {
+        Ok((
+            Self {
+                encoding,
+                default_indent,
+                has_trailing_newline,
+                version,
+                future_imports,
+            },
+            BaseWhitespaceParserConfig::new(lines, default_newline)?,
+        ))
+    }
+}
+
+/// An internal helper function used by python unit tests to compare configs.
+#[pyfunction]
+fn parser_config_asdict<'py>(py: Python<'py>, config: PyRef<'py, ParserConfig>) -> &'py PyDict {
+    let super_config: &BaseWhitespaceParserConfig = config.as_ref();
+    vec![
+        ("lines", super_config.lines.to_object(py)),
+        ("encoding", config.encoding.clone_ref(py)),
+        ("default_indent", config.default_indent.clone_ref(py)),
+        (
+            "default_newline",
+            super_config.default_newline.to_object(py),
+        ),
+        (
+            "has_trailing_newline",
+            config.has_trailing_newline.clone_ref(py),
+        ),
+        ("version", config.version.clone_ref(py)),
+        ("future_imports", config.future_imports.clone_ref(py)),
+    ]
+    .into_py_dict(py)
+}
+
+pub fn init_module(_py: Python, m: &PyModule) -> PyResult<()> {
+    m.add_class::<BaseWhitespaceParserConfig>()?;
+    m.add_class::<ParserConfig>()?;
+    m.add_function(wrap_pyfunction!(parser_config_asdict, m)?)
+        .unwrap();
+    Ok(())
+}
diff --git a/native/libcst/src/nodes/py_cached.rs b/native/libcst/src/nodes/py_cached.rs
new file mode 100644
index 00000000..e8a4dfd4
--- /dev/null
+++ b/native/libcst/src/nodes/py_cached.rs
@@ -0,0 +1,76 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// This source code is licensed under the MIT license found in the
+// LICENSE file in the root directory of this source tree.
+
+use pyo3::prelude::*;
+use std::convert::AsRef;
+use std::ops::Deref;
+
+/// An immutable wrapper around a rust type T and its PyObject equivalent. Caches the conversion
+/// to and from the PyObject.
+pub struct PyCached<T> {
+    native: T,
+    py_object: PyObject,
+}
+
+impl<T> PyCached<T>
+where
+    T: ToPyObject,
+{
+    pub fn new(py: Python, native: T) -> Self {
+        Self {
+            py_object: native.to_object(py),
+            native,
+        }
+    }
+}
+
+impl<'source, T> FromPyObject<'source> for PyCached<T>
+where
+    T: FromPyObject<'source>,
+{
+    fn extract(ob: &'source PyAny) -> PyResult<Self> {
+        Python::with_gil(|py| {
+            Ok(PyCached {
+                native: ob.extract()?,
+                py_object: ob.to_object(py),
+            })
+        })
+    }
+}
+
+impl<T> IntoPy<PyObject> for PyCached<T> {
+    fn into_py(self, _py: Python) -> PyObject {
+        self.py_object
+    }
+}
+
+impl<T> ToPyObject for PyCached<T> {
+    fn to_object(&self, py: Python) -> PyObject {
+        self.py_object.clone_ref(py)
+    }
+}
+
+impl<T> AsRef<T> for PyCached<T> {
+    fn as_ref(&self) -> &T {
+        &self.native
+    }
+}
+
+impl<T> Deref for PyCached<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        &self.native
+    }
+}
+
+impl<T> From<T> for PyCached<T>
+where
+    T: ToPyObject,
+{
+    fn from(val: T) -> Self {
+        Python::with_gil(|py| Self::new(py, val))
+    }
+}
diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs
new file mode 100644
index 00000000..63c8ab8a
--- /dev/null
+++ b/native/libcst/src/nodes/statement.rs
@@ -0,0 +1,1986 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// This source code is licensed under the MIT license found in the
+// LICENSE file in the root directory of this source tree.
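+
+// A note on the pattern used throughout this file: the parser constructs each
+// node with `TokenRef`s pointing back into the token stream. `Inflate::inflate`
+// then consumes the whitespace recorded on those tokens (via the
+// `parse_*_whitespace` / `parse_empty_lines` helpers) and stores it in the
+// node's public whitespace fields, while `Codegen::codegen` re-emits keywords,
+// punctuation and that stored whitespace so the original source can be
+// reproduced. As an illustrative sketch (the node, field and token names are
+// placeholders, not a real node), most impls below follow this shape:
+//
+//     impl<'a> Inflate<'a> for SomeNode<'a> {
+//         fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
+//             self.whitespace_after_kw = parse_simple_whitespace(
+//                 config,
+//                 &mut (*self.kw_tok).whitespace_after.borrow_mut(),
+//             )?;
+//             self.child = self.child.inflate(config)?;
+//             Ok(self)
+//         }
+//     }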
+
+use std::{mem::swap, rc::Rc};
+
+use super::{
+    inflate_helpers::adjust_parameters_trailing_whitespace, Attribute, Codegen, CodegenState,
+    Comma, Dot, EmptyLine, Expression, From, ImportStar, LeftParen, List, Name, NameOrAttribute,
+    Parameters, ParenthesizableWhitespace, RightParen, Semicolon, SimpleWhitespace, StarredElement,
+    Subscript, TrailingWhitespace, Tuple,
+};
+use crate::{
+    nodes::{
+        traits::{Inflate, Result, WithComma, WithLeadingLines},
+        Arg, AssignEqual, Asynchronous, AugOp, Element, ParenthesizedNode,
+    },
+    tokenizer::{
+        whitespace_parser::{
+            parse_empty_lines, parse_parenthesizable_whitespace, parse_simple_whitespace,
+            parse_trailing_whitespace, Config,
+        },
+        Token,
+    },
+};
+use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode};
+
+type TokenRef<'a> = Rc<Token<'a>>;
+
+#[allow(clippy::large_enum_variant)]
+#[derive(Debug, Eq, PartialEq, Clone, Inflate, Codegen, IntoPy)]
+pub enum Statement<'a> {
+    Simple(SimpleStatementLine<'a>),
+    Compound(CompoundStatement<'a>),
+}
+
+impl<'a> WithLeadingLines<'a> for Statement<'a> {
+    fn leading_lines(&mut self) -> &mut Vec<EmptyLine<'a>> {
+        match self {
+            Self::Simple(s) => &mut s.leading_lines,
+            Self::Compound(c) => c.leading_lines(),
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)]
+#[allow(clippy::large_enum_variant)]
+pub enum CompoundStatement<'a> {
+    FunctionDef(FunctionDef<'a>),
+    If(If<'a>),
+    For(For<'a>),
+    While(While<'a>),
+    ClassDef(ClassDef<'a>),
+    Try(Try<'a>),
+    With(With<'a>),
+}
+
+impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> {
+    fn leading_lines(&mut self) -> &mut Vec<EmptyLine<'a>> {
+        match self {
+            Self::FunctionDef(f) => &mut f.leading_lines,
+            Self::If(f) => &mut f.leading_lines,
+            Self::For(f) => &mut f.leading_lines,
+            Self::While(f) => &mut f.leading_lines,
+            Self::ClassDef(c) => &mut c.leading_lines,
+            Self::Try(t) => &mut t.leading_lines,
+            Self::With(w) => &mut w.leading_lines,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)]
+pub enum Suite<'a> {
+    IndentedBlock(IndentedBlock<'a>),
+    SimpleStatementSuite(SimpleStatementSuite<'a>),
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
+pub struct IndentedBlock<'a> {
+    /// Sequence of statements belonging to this indented block.
+    pub body: Vec<Statement<'a>>,
+    /// Any optional trailing comment and the final ``NEWLINE`` at the end of the line.
+    pub header: TrailingWhitespace<'a>,
+    /// A string representing a specific indentation. A ``None`` value uses the module's
+    /// default indentation. This is included because indentation is allowed to be
+    /// inconsistent across a file, just not ambiguously.
+    pub indent: Option<&'a str>,
+    /// Any trailing comments or lines after the dedent that are owned by this indented
+    /// block. Statements own preceding and same-line trailing comments, but not
+    /// trailing lines, so it falls on :class:`IndentedBlock` to own it. In the case
+    /// that a statement follows an :class:`IndentedBlock`, that statement will own the
+    /// comments and lines that are at the same indent as the statement, and this
+    /// :class:`IndentedBlock` will own the comments and lines that are indented
+    /// further.
+    pub footer: Vec<EmptyLine<'a>>,
+
+    pub(crate) newline_tok: TokenRef<'a>,
+    pub(crate) indent_tok: TokenRef<'a>,
+    pub(crate) dedent_tok: TokenRef<'a>,
+}
+
+impl<'a> Codegen<'a> for IndentedBlock<'a> {
+    fn codegen(&self, state: &mut CodegenState<'a>) {
+        self.header.codegen(state);
+
+        let indent = match self.indent {
+            Some(i) => i,
+            None => state.default_indent,
+        };
+        state.indent(indent);
+
+        if self.body.is_empty() {
+            // Empty indented blocks are not syntactically valid in Python unless they
+            // contain a 'pass' statement, so add one here.
+            state.add_indent();
+            state.add_token("pass");
+            state.add_token(state.default_newline);
+        } else {
+            for stmt in &self.body {
+                // IndentedBlock is responsible for adjusting the current indentation
+                // level, but its children are responsible for actually adding that
+                // indentation to the token list.
+                stmt.codegen(state);
+            }
+        }
+
+        for f in &self.footer {
+            f.codegen(state);
+        }
+
+        state.dedent();
+    }
+}
+
+impl<'a> Inflate<'a> for IndentedBlock<'a> {
+    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
+        self.body = self.body.inflate(config)?;
+        // We want to be able to only keep comments in the footer that are actually for
+        // this IndentedBlock. We do so by assuming that lines which are indented to the
+        // same level as the block itself are comments that go at the footer of the
+        // block. Comments that are indented to less than this indent are assumed to
+        // belong to the next line of code. We override the indent here because the
+        // dedent node's absolute indent is the resulting indentation after the dedent
+        // is performed. It's this way because the whitespace state for both the dedent's
+        // whitespace_after and the next BaseCompoundStatement's whitespace_before is
+        // shared. This allows us to partially parse here and parse the rest of the
+        // whitespace and comments on the next line, effectively making sure that
+        // comments are attached to the correct node.
+        let footer = parse_empty_lines(
+            config,
+            &mut (*self.dedent_tok).whitespace_after.borrow_mut(),
+            Some(self.indent_tok.whitespace_before.borrow().absolute_indent),
+        )?;
+        let header = parse_trailing_whitespace(
+            config,
+            &mut (*self.newline_tok).whitespace_before.borrow_mut(),
+        )?;
+        self.footer = footer;
+        self.header = header;
+        self.indent = self.indent_tok.relative_indent;
+        if self.indent == Some(config.default_indent) {
+            self.indent = None;
+        }
+        Ok(self)
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
+pub struct SimpleStatementSuite<'a> {
+    /// Sequence of small statements. All but the last statement are required to have
+    /// a semicolon.
+    pub body: Vec<SmallStatement<'a>>,
+
+    /// The whitespace between the colon in the parent statement and the body.
+    pub leading_whitespace: SimpleWhitespace<'a>,
+    /// Any optional trailing comment and the final ``NEWLINE`` at the end of the line.
+ pub trailing_whitespace: TrailingWhitespace<'a>, + + pub(crate) first_tok: TokenRef<'a>, + pub(crate) newline_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for SimpleStatementSuite<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_whitespace = parse_simple_whitespace( + config, + &mut (*self.first_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + self.trailing_whitespace = parse_trailing_whitespace( + config, + &mut (*self.newline_tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +fn _simple_statement_codegen<'a>( + body: &[SmallStatement<'a>], + trailing_whitespace: &TrailingWhitespace<'a>, + state: &mut CodegenState<'a>, +) { + for stmt in body { + stmt.codegen(state); + // TODO: semicolon + } + if body.is_empty() { + // Empty simple statement blocks are not syntactically valid in Python + // unless they contain a 'pass' statement, so add one here. + state.add_token("pass") + } + trailing_whitespace.codegen(state); +} + +impl<'a> Codegen<'a> for SimpleStatementSuite<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.leading_whitespace.codegen(state); + _simple_statement_codegen(&self.body, &self.trailing_whitespace, state); + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct SimpleStatementLine<'a> { + /// Sequence of small statements. All but the last statement are required to have + /// a semicolon. + pub body: Vec>, + + /// Sequence of empty lines appearing before this simple statement line. + pub leading_lines: Vec>, + /// Any optional trailing comment and the final ``NEWLINE`` at the end of the line. + pub trailing_whitespace: TrailingWhitespace<'a>, + + pub(crate) first_tok: TokenRef<'a>, + pub(crate) newline_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for SimpleStatementLine<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for line in &self.leading_lines { + line.codegen(state); + } + state.add_indent(); + _simple_statement_codegen(&self.body, &self.trailing_whitespace, state); + } +} + +impl<'a> Inflate<'a> for SimpleStatementLine<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.first_tok).whitespace_before.borrow_mut(), + None, + )?; + self.body = self.body.inflate(config)?; + self.trailing_whitespace = parse_trailing_whitespace( + config, + &mut (*self.newline_tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +#[allow(dead_code, clippy::large_enum_variant)] +#[derive(Debug, Eq, PartialEq, Clone, Codegen, Inflate, IntoPy)] +pub enum SmallStatement<'a> { + Pass(Pass<'a>), + Break(Break<'a>), + Continue(Continue<'a>), + Return(Return<'a>), + Expr(Expr<'a>), + Assert(Assert<'a>), + Import(Import<'a>), + ImportFrom(ImportFrom<'a>), + Assign(Assign<'a>), + AnnAssign(AnnAssign<'a>), + Raise(Raise<'a>), + Global(Global<'a>), + Nonlocal(Nonlocal<'a>), + AugAssign(AugAssign<'a>), + Del(Del<'a>), +} + +impl<'a> SmallStatement<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + match self { + Self::Pass(p) => Self::Pass(p.with_semicolon(semicolon)), + Self::Break(p) => Self::Break(p.with_semicolon(semicolon)), + Self::Continue(p) => Self::Continue(p.with_semicolon(semicolon)), + Self::Expr(p) => Self::Expr(p.with_semicolon(semicolon)), + Self::Import(i) => Self::Import(i.with_semicolon(semicolon)), + Self::ImportFrom(i) => Self::ImportFrom(i.with_semicolon(semicolon)), + Self::Assign(a) => Self::Assign(a.with_semicolon(semicolon)), + Self::AnnAssign(a) => 
Self::AnnAssign(a.with_semicolon(semicolon)), + Self::Return(r) => Self::Return(r.with_semicolon(semicolon)), + Self::Assert(a) => Self::Assert(a.with_semicolon(semicolon)), + Self::Raise(r) => Self::Raise(r.with_semicolon(semicolon)), + Self::Global(g) => Self::Global(g.with_semicolon(semicolon)), + Self::Nonlocal(l) => Self::Nonlocal(l.with_semicolon(semicolon)), + Self::AugAssign(a) => Self::AugAssign(a.with_semicolon(semicolon)), + Self::Del(d) => Self::Del(d.with_semicolon(semicolon)), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Pass<'a> { + pub semicolon: Option>, +} +impl<'a> Pass<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon } + } +} +impl<'a> Codegen<'a> for Pass<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("pass"); + self.semicolon.codegen(state); + } +} +impl<'a> Inflate<'a> for Pass<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Break<'a> { + pub semicolon: Option>, +} +impl<'a> Break<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon } + } +} +impl<'a> Codegen<'a> for Break<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("break"); + self.semicolon.codegen(state); + } +} +impl<'a> Inflate<'a> for Break<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Continue<'a> { + pub semicolon: Option>, +} +impl<'a> Continue<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon } + } +} +impl<'a> Codegen<'a> for Continue<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("continue"); + self.semicolon.codegen(state); + } +} +impl<'a> Inflate<'a> for Continue<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Expr<'a> { + pub value: Expression<'a>, + pub semicolon: Option>, +} +impl<'a> Expr<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} +impl<'a> Codegen<'a> for Expr<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.value.codegen(state); + self.semicolon.codegen(state); + } +} +impl<'a> Inflate<'a> for Expr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Assign<'a> { + pub targets: Vec>, + pub value: Expression<'a>, + pub semicolon: Option>, +} + +impl<'a> Codegen<'a> for Assign<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for target in &self.targets { + target.codegen(state); + } + self.value.codegen(state); + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for Assign<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.targets = self.targets.inflate(config)?; + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Assign<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, 
PartialEq, Eq, Clone, IntoPy)] +pub struct AssignTarget<'a> { + pub target: AssignTargetExpression<'a>, + pub whitespace_before_equal: SimpleWhitespace<'a>, + pub whitespace_after_equal: SimpleWhitespace<'a>, + + pub(crate) equal_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for AssignTarget<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.target.codegen(state); + self.whitespace_before_equal.codegen(state); + state.add_token("="); + self.whitespace_after_equal.codegen(state); + } +} + +impl<'a> Inflate<'a> for AssignTarget<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.target = self.target.inflate(config)?; + self.whitespace_before_equal = parse_simple_whitespace( + config, + &mut (*self.equal_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after_equal = + parse_simple_whitespace(config, &mut (*self.equal_tok).whitespace_after.borrow_mut())?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate, IntoPy)] +pub enum AssignTargetExpression<'a> { + Name(Name<'a>), + Attribute(Attribute<'a>), + StarredElement(StarredElement<'a>), + Tuple(Tuple<'a>), + List(List<'a>), + Subscript(Subscript<'a>), +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Import<'a> { + pub names: Vec>, + pub semicolon: Option>, + pub whitespace_after_import: SimpleWhitespace<'a>, + + pub(crate) import_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Import<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("import"); + self.whitespace_after_import.codegen(state); + for (i, name) in self.names.iter().enumerate() { + name.codegen(state); + if name.comma.is_none() && i < self.names.len() - 1 { + state.add_token(", "); + } + } + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for Import<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_import = parse_simple_whitespace( + config, + &mut (*self.import_tok).whitespace_after.borrow_mut(), + )?; + self.names = self.names.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Import<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ImportFrom<'a> { + #[no_py_default] + pub module: Option>, + pub names: ImportNames<'a>, + pub relative: Vec>, + pub lpar: Option>, + pub rpar: Option>, + pub semicolon: Option>, + pub whitespace_after_from: SimpleWhitespace<'a>, + pub whitespace_before_import: SimpleWhitespace<'a>, + pub whitespace_after_import: SimpleWhitespace<'a>, + + pub(crate) from_tok: TokenRef<'a>, + pub(crate) import_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for ImportFrom<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("from"); + self.whitespace_after_from.codegen(state); + for dot in &self.relative { + dot.codegen(state); + } + if let Some(module) = &self.module { + module.codegen(state); + } + self.whitespace_before_import.codegen(state); + state.add_token("import"); + self.whitespace_after_import.codegen(state); + if let Some(lpar) = &self.lpar { + lpar.codegen(state); + } + self.names.codegen(state); + if let Some(rpar) = &self.rpar { + rpar.codegen(state); + } + + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for ImportFrom<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { 
+ self.whitespace_after_from = + parse_simple_whitespace(config, &mut (*self.from_tok).whitespace_after.borrow_mut())?; + + self.module = self.module.inflate(config)?; + + self.whitespace_after_import = parse_simple_whitespace( + config, + &mut (*self.import_tok).whitespace_after.borrow_mut(), + )?; + + self.relative = inflate_dots(self.relative, config)?; + + if !self.relative.is_empty() && self.module.is_none() { + // For relative-only imports relocate the space after the final dot to be owned + // by the import token. + if let Some(Dot { + whitespace_after: ParenthesizableWhitespace::SimpleWhitespace(dot_ws), + .. + }) = self.relative.last_mut() + { + swap(dot_ws, &mut self.whitespace_before_import); + } + } else { + self.whitespace_before_import = parse_simple_whitespace( + config, + &mut (*self.import_tok).whitespace_before.borrow_mut(), + )?; + } + + self.lpar = self.lpar.inflate(config)?; + self.names = self.names.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + + self.semicolon = self.semicolon.inflate(config)?; + + Ok(self) + } +} + +fn inflate_dots<'a>(dots: Vec>, config: &Config<'a>) -> Result>> { + let mut ret: Vec> = vec![]; + let mut last_tok: Option> = None; + for mut dot in dots { + if let Some(last_tokref) = &last_tok { + // Consecutive dots having the same Token can only happen if `...` was + // parsed as a single ELLIPSIS token. In this case the token's + // whitespace_before belongs to the first dot, but the whitespace_after is + // moved to the 3rd dot (by swapping it twice) + if last_tokref.start_pos == dot.tok.start_pos { + swap( + &mut ret.last_mut().unwrap().whitespace_after, + &mut dot.whitespace_after, + ); + ret.push(dot); + continue; + } + } + last_tok = Some(dot.tok.clone()); + ret.push(dot.inflate(config)?); + } + Ok(ret) +} + +impl<'a> ImportFrom<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ImportAlias<'a> { + pub name: NameOrAttribute<'a>, + pub asname: Option>, + pub comma: Option>, +} + +impl<'a> Inflate<'a> for ImportAlias<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.name = self.name.inflate(config)?; + self.asname = self.asname.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) + } +} + +impl<'a> WithComma<'a> for ImportAlias<'a> { + fn with_comma(self, comma: Comma<'a>) -> ImportAlias<'a> { + let comma = Some(comma); + Self { comma, ..self } + } +} + +impl<'a> Codegen<'a> for ImportAlias<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.name.codegen(state); + if let Some(asname) = &self.asname { + asname.codegen(state); + } + if let Some(comma) = &self.comma { + comma.codegen(state); + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct AsName<'a> { + pub name: AssignTargetExpression<'a>, + pub whitespace_before_as: ParenthesizableWhitespace<'a>, + pub whitespace_after_as: ParenthesizableWhitespace<'a>, + + pub(crate) as_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for AsName<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before_as.codegen(state); + state.add_token("as"); + self.whitespace_after_as.codegen(state); + self.name.codegen(state); + } +} + +impl<'a> Inflate<'a> for AsName<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_as = parse_parenthesizable_whitespace( + config, + &mut (*self.as_tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after_as = 
parse_parenthesizable_whitespace( + config, + &mut (*self.as_tok).whitespace_after.borrow_mut(), + )?; + self.name = self.name.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Inflate, IntoPy)] +pub enum ImportNames<'a> { + Star(ImportStar), + Aliases(Vec>), +} + +impl<'a> Codegen<'a> for ImportNames<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + match self { + Self::Star(s) => s.codegen(state), + Self::Aliases(aliases) => { + for (i, alias) in aliases.iter().enumerate() { + alias.codegen(state); + if alias.comma.is_none() && i < aliases.len() - 1 { + state.add_token(", "); + } + } + } + } + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct FunctionDef<'a> { + pub name: Name<'a>, + pub params: Parameters<'a>, + pub body: Suite<'a>, + pub decorators: Vec>, + pub returns: Option>, + pub asynchronous: Option>, + pub leading_lines: Vec>, + pub lines_after_decorators: Vec>, + pub whitespace_after_def: SimpleWhitespace<'a>, + pub whitespace_after_name: SimpleWhitespace<'a>, + pub whitespace_before_params: ParenthesizableWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) async_tok: Option>, + pub(crate) def_tok: TokenRef<'a>, + pub(crate) open_paren_tok: TokenRef<'a>, + pub(crate) close_paren_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> FunctionDef<'a> { + pub fn with_decorators(self, decorators: Vec>) -> Self { + Self { decorators, ..self } + } +} + +impl<'a> Codegen<'a> for FunctionDef<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for l in &self.leading_lines { + l.codegen(state); + } + for dec in self.decorators.iter() { + dec.codegen(state); + } + for l in &self.lines_after_decorators { + l.codegen(state); + } + state.add_indent(); + + if let Some(asy) = &self.asynchronous { + asy.codegen(state); + } + state.add_token("def"); + self.whitespace_after_def.codegen(state); + self.name.codegen(state); + self.whitespace_after_name.codegen(state); + state.add_token("("); + self.whitespace_before_params.codegen(state); + self.params.codegen(state); + state.add_token(")"); + + if let Some(ann) = &self.returns { + ann.codegen(state, "->"); + } + + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for FunctionDef<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.decorators = self.decorators.inflate(config)?; + let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { + let whitespace_after = + parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; + ( + Some(Asynchronous { whitespace_after }), + Some(parse_empty_lines( + config, + &mut asy.whitespace_before.borrow_mut(), + None, + )?), + ) + } else { + (None, None) + }; + + self.asynchronous = asynchronous; + let leading_lines = if let Some(ll) = leading_lines { + ll + } else { + parse_empty_lines( + config, + &mut (*self.def_tok).whitespace_before.borrow_mut(), + None, + )? 
+ }; + + self.leading_lines = leading_lines; + if let Some(dec) = self.decorators.first_mut() { + swap(&mut self.lines_after_decorators, &mut self.leading_lines); + swap(&mut dec.leading_lines, &mut self.leading_lines); + } + + self.whitespace_after_def = + parse_simple_whitespace(config, &mut (*self.def_tok).whitespace_after.borrow_mut())?; + + self.name = self.name.inflate(config)?; + self.whitespace_after_name = parse_simple_whitespace( + config, + &mut (*self.open_paren_tok).whitespace_before.borrow_mut(), + )?; + + self.whitespace_before_params = parse_parenthesizable_whitespace( + config, + &mut (*self.open_paren_tok).whitespace_after.borrow_mut(), + )?; + self.params = self.params.inflate(config)?; + adjust_parameters_trailing_whitespace(config, &mut self.params, &self.close_paren_tok)?; + + self.returns = self.returns.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + + self.body = self.body.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct Decorator<'a> { + pub decorator: Expression<'a>, + pub leading_lines: Vec>, + pub whitespace_after_at: SimpleWhitespace<'a>, + pub trailing_whitespace: TrailingWhitespace<'a>, + + pub(crate) at_tok: TokenRef<'a>, + pub(crate) newline_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Decorator<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in self.leading_lines.iter() { + ll.codegen(state); + } + state.add_indent(); + state.add_token("@"); + self.whitespace_after_at.codegen(state); + self.decorator.codegen(state); + self.trailing_whitespace.codegen(state); + } +} + +impl<'a> Inflate<'a> for Decorator<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.at_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_after_at = + parse_simple_whitespace(config, &mut (*self.at_tok).whitespace_after.borrow_mut())?; + self.decorator = self.decorator.inflate(config)?; + self.trailing_whitespace = parse_trailing_whitespace( + config, + &mut (*self.newline_tok).whitespace_before.borrow_mut(), + )?; + Ok(self) + } +} + +impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct If<'a> { + /// The expression that, when evaluated, should give us a truthy value + pub test: Expression<'a>, + // The body of this compound statement. + pub body: Suite<'a>, + + /// An optional ``elif`` or ``else`` clause. ``If`` signifies an ``elif`` block. + pub orelse: Option>>, + + /// Sequence of empty lines appearing before this compound statement line. + pub leading_lines: Vec>, + + /// The whitespace appearing after the ``if`` keyword but before the test + /// expression. + pub whitespace_before_test: SimpleWhitespace<'a>, + + /// The whitespace appearing after the test expression but before the colon. + pub whitespace_after_test: SimpleWhitespace<'a>, + + /// Signifies if this instance represents an ``elif`` or an ``if`` block. 
+ #[skip_py] + pub is_elif: bool, + + pub(crate) if_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for If<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for l in &self.leading_lines { + l.codegen(state); + } + state.add_indent(); + + state.add_token(if self.is_elif { "elif" } else { "if" }); + self.whitespace_before_test.codegen(state); + self.test.codegen(state); + self.whitespace_after_test.codegen(state); + state.add_token(":"); + self.body.codegen(state); + if let Some(orelse) = &self.orelse { + orelse.codegen(state) + } + } +} + +impl<'a> Inflate<'a> for If<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.if_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_before_test = + parse_simple_whitespace(config, &mut (*self.if_tok).whitespace_after.borrow_mut())?; + self.test = self.test.inflate(config)?; + self.whitespace_after_test = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] +pub enum OrElse<'a> { + Elif(If<'a>), + Else(Else<'a>), +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Else<'a> { + pub body: Suite<'a>, + /// Sequence of empty lines appearing before this compound statement line. + pub leading_lines: Vec>, + /// The whitespace appearing after the ``else`` keyword but before the colon. + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) else_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Else<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for l in &self.leading_lines { + l.codegen(state); + } + state.add_indent(); + + state.add_token("else"); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for Else<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.else_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Annotation<'a> { + pub annotation: Expression<'a>, + pub whitespace_before_indicator: Option>, + pub whitespace_after_indicator: ParenthesizableWhitespace<'a>, + + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Annotation<'a> { + pub fn codegen(&self, state: &mut CodegenState<'a>, default_indicator: &'a str) { + if let Some(ws) = &self.whitespace_before_indicator { + ws.codegen(state); + } else if default_indicator == "->" { + state.add_token(" "); + } else { + panic!("Variable annotation but whitespace is None"); + } + + state.add_token(default_indicator); + self.whitespace_after_indicator.codegen(state); + self.annotation.codegen(state); + } +} + +impl<'a> Inflate<'a> for Annotation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_indicator = Some(parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?); + self.whitespace_after_indicator = parse_parenthesizable_whitespace( + config, + &mut 
(*self.tok).whitespace_after.borrow_mut(), + )?; + self.annotation = self.annotation.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct AnnAssign<'a> { + pub target: AssignTargetExpression<'a>, + pub annotation: Annotation<'a>, + pub value: Option>, + pub equal: Option>, + pub semicolon: Option>, +} + +impl<'a> Codegen<'a> for AnnAssign<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.target.codegen(state); + self.annotation.codegen(state, ":"); + if let Some(eq) = &self.equal { + eq.codegen(state); + } else if self.value.is_some() { + state.add_token(" = "); + } + if let Some(value) = &self.value { + value.codegen(state); + } + + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for AnnAssign<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.target = self.target.inflate(config)?; + self.annotation = self.annotation.inflate(config)?; + self.value = self.value.inflate(config)?; + self.equal = self.equal.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> AnnAssign<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Return<'a> { + pub value: Option>, + pub whitespace_after_return: Option>, + pub semicolon: Option>, + + pub(crate) return_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Return<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("return"); + if let Some(ws) = &self.whitespace_after_return { + ws.codegen(state); + } else if self.value.is_some() { + state.add_token(" "); + } + + if let Some(val) = &self.value { + val.codegen(state); + } + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for Return<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + if self.value.is_some() { + self.whitespace_after_return = Some(parse_simple_whitespace( + config, + &mut (*self.return_tok).whitespace_after.borrow_mut(), + )?); + } else { + // otherwise space is owned by semicolon or small statement + // whitespace is not None to preserve a quirk of the pure python parser + self.whitespace_after_return = Some(Default::default()) + } + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Return<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Assert<'a> { + pub test: Expression<'a>, + pub msg: Option>, + pub comma: Option>, + pub whitespace_after_assert: SimpleWhitespace<'a>, + pub semicolon: Option>, + + pub(crate) assert_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Assert<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("assert"); + self.whitespace_after_assert.codegen(state); + self.test.codegen(state); + if let Some(comma) = &self.comma { + comma.codegen(state); + } else if self.msg.is_some() { + state.add_token(", "); + } + if let Some(msg) = &self.msg { + msg.codegen(state); + } + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} +impl<'a> Inflate<'a> for Assert<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_assert = parse_simple_whitespace( + config, + &mut (*self.assert_tok).whitespace_after.borrow_mut(), + )?; + + self.test = self.test.inflate(config)?; 
+ self.comma = self.comma.inflate(config)?; + self.msg = self.msg.inflate(config)?; + + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Assert<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Raise<'a> { + pub exc: Option>, + pub cause: Option>, + pub whitespace_after_raise: Option>, + pub semicolon: Option>, + + pub(crate) raise_tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Raise<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + if self.exc.is_some() { + self.whitespace_after_raise = Some(parse_simple_whitespace( + config, + &mut (*self.raise_tok).whitespace_after.borrow_mut(), + )?); + } + + self.exc = self.exc.inflate(config)?; + self.cause = self.cause.inflate(config)?; + if self.exc.is_none() { + if let Some(cause) = self.cause.as_mut() { + // in `raise from`, `raise` owns the shared whitespace + cause.whitespace_before_from = None; + } + } + self.semicolon = self.semicolon.inflate(config)?; + + Ok(self) + } +} + +impl<'a> Codegen<'a> for Raise<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("raise"); + if let Some(ws) = &self.whitespace_after_raise { + ws.codegen(state); + } else if self.exc.is_some() { + state.add_token(" "); + } + + if let Some(exc) = &self.exc { + exc.codegen(state); + } + + if let Some(cause) = &self.cause { + cause.codegen(state, " "); + } + + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Raise<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct NameItem<'a> { + pub name: Name<'a>, + pub comma: Option>, +} + +impl<'a> Inflate<'a> for NameItem<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.name = self.name.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) + } +} + +impl<'a> NameItem<'a> { + fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { + self.name.codegen(state); + if let Some(comma) = &self.comma { + comma.codegen(state); + } else if default_comma { + state.add_token(", "); + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Global<'a> { + pub names: Vec>, + pub whitespace_after_global: SimpleWhitespace<'a>, + pub semicolon: Option>, + + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Global<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_global = + parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; + self.names = self.names.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Global<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("global"); + self.whitespace_after_global.codegen(state); + let len = self.names.len(); + for (i, name) in self.names.iter().enumerate() { + name.codegen(state, i + 1 != len); + } + + if let Some(semicolon) = &self.semicolon { + semicolon.codegen(state); + } + } +} + +impl<'a> Global<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Nonlocal<'a> { + pub names: Vec>, + pub whitespace_after_nonlocal: SimpleWhitespace<'a>, + pub semicolon: Option>, + + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Nonlocal<'a> { + fn 
inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_nonlocal = + parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; + self.names = self.names.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for Nonlocal<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("nonlocal"); + self.whitespace_after_nonlocal.codegen(state); + let len = self.names.len(); + for (i, name) in self.names.iter().enumerate() { + name.codegen(state, i + 1 != len); + } + + if let Some(semicolon) = &self.semicolon { + semicolon.codegen(state); + } + } +} + +impl<'a> Nonlocal<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct For<'a> { + pub target: AssignTargetExpression<'a>, + pub iter: Expression<'a>, + pub body: Suite<'a>, + pub orelse: Option>, + pub asynchronous: Option>, + + pub leading_lines: Vec>, + pub whitespace_after_for: SimpleWhitespace<'a>, + pub whitespace_before_in: SimpleWhitespace<'a>, + pub whitespace_after_in: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) async_tok: Option>, + pub(crate) for_tok: TokenRef<'a>, + pub(crate) in_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for For<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + + if let Some(asy) = &self.asynchronous { + asy.codegen(state); + } + state.add_token("for"); + self.whitespace_after_for.codegen(state); + self.target.codegen(state); + self.whitespace_before_in.codegen(state); + state.add_token("in"); + self.whitespace_after_in.codegen(state); + self.iter.codegen(state); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + if let Some(e) = &self.orelse { + e.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for For<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { + let whitespace_after = + parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; + ( + Some(Asynchronous { whitespace_after }), + Some(parse_empty_lines( + config, + &mut asy.whitespace_before.borrow_mut(), + None, + )?), + ) + } else { + (None, None) + }; + self.leading_lines = if let Some(ll) = leading_lines { + ll + } else { + parse_empty_lines( + config, + &mut (*self.for_tok).whitespace_before.borrow_mut(), + None, + )? 
+ }; + self.asynchronous = asynchronous; + self.whitespace_after_for = + parse_simple_whitespace(config, &mut (*self.for_tok).whitespace_after.borrow_mut())?; + self.target = self.target.inflate(config)?; + self.whitespace_before_in = + parse_simple_whitespace(config, &mut (*self.in_tok).whitespace_before.borrow_mut())?; + self.whitespace_after_in = + parse_simple_whitespace(config, &mut (*self.in_tok).whitespace_after.borrow_mut())?; + self.iter = self.iter.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + + self.body = self.body.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct While<'a> { + pub test: Expression<'a>, + pub body: Suite<'a>, + pub orelse: Option>, + pub leading_lines: Vec>, + pub whitespace_after_while: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) while_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for While<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + + state.add_token("while"); + self.whitespace_after_while.codegen(state); + self.test.codegen(state); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + if let Some(orelse) = &self.orelse { + orelse.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for While<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.while_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_after_while = + parse_simple_whitespace(config, &mut (*self.while_tok).whitespace_after.borrow_mut())?; + self.test = self.test.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ClassDef<'a> { + pub name: Name<'a>, + pub body: Suite<'a>, + pub bases: Vec>, + pub keywords: Vec>, + pub decorators: Vec>, + pub lpar: Option>, + pub rpar: Option>, + pub leading_lines: Vec>, + pub lines_after_decorators: Vec>, + pub whitespace_after_class: SimpleWhitespace<'a>, + pub whitespace_after_name: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) class_tok: TokenRef<'a>, + pub(crate) parens_tok: Option<(TokenRef<'a>, TokenRef<'a>)>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for ClassDef<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + for dec in &self.decorators { + dec.codegen(state); + } + for lad in &self.lines_after_decorators { + lad.codegen(state); + } + state.add_indent(); + + state.add_token("class"); + self.whitespace_after_class.codegen(state); + self.name.codegen(state); + self.whitespace_after_name.codegen(state); + + let need_parens = !self.bases.is_empty() || !self.keywords.is_empty(); + + if let Some(lpar) = &self.lpar { + lpar.codegen(state); + } else if need_parens { + state.add_token("("); + } + let args = self.bases.iter().chain(self.keywords.iter()); + let len = self.bases.len() + self.keywords.len(); + for (i, arg) in args.enumerate() { + 
arg.codegen(state, i + 1 < len); + } + + if let Some(rpar) = &self.rpar { + rpar.codegen(state); + } else if need_parens { + state.add_token(")"); + } + + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for ClassDef<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.class_tok).whitespace_before.borrow_mut(), + None, + )?; + self.decorators = self.decorators.inflate(config)?; + if let Some(dec) = self.decorators.first_mut() { + swap(&mut self.lines_after_decorators, &mut self.leading_lines); + swap(&mut dec.leading_lines, &mut self.leading_lines); + } + + self.whitespace_after_class = + parse_simple_whitespace(config, &mut (*self.class_tok).whitespace_after.borrow_mut())?; + self.name = self.name.inflate(config)?; + + if let Some((lpar_tok, _)) = self.parens_tok.as_mut() { + self.whitespace_after_name = + parse_simple_whitespace(config, &mut lpar_tok.whitespace_before.borrow_mut())?; + self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; + self.bases = self.bases.inflate(config)?; + self.keywords = self.keywords.inflate(config)?; + self.rpar = self.rpar.map(|lpar| lpar.inflate(config)).transpose()?; + // TODO: set whitespace_after_arg for last arg? + } + + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + + Ok(self) + } +} + +impl<'a> ClassDef<'a> { + pub fn with_decorators(self, decorators: Vec>) -> Self { + Self { decorators, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Finally<'a> { + pub body: Suite<'a>, + pub leading_lines: Vec>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) finally_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Finally<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + + state.add_token("finally"); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for Finally<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.finally_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ExceptHandler<'a> { + pub body: Suite<'a>, + pub r#type: Option>, + pub name: Option>, + pub leading_lines: Vec>, + pub whitespace_after_except: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) except_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for ExceptHandler<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + + state.add_token("except"); + self.whitespace_after_except.codegen(state); + if let Some(t) = &self.r#type { + t.codegen(state); + } + if let Some(n) = &self.name { + n.codegen(state); + } + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for ExceptHandler<'a> { + fn 
inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.except_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_after_except = parse_simple_whitespace( + config, + &mut (*self.except_tok).whitespace_after.borrow_mut(), + )?; + + self.r#type = self.r#type.inflate(config)?; + self.name = self.name.inflate(config)?; + if self.name.is_some() { + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + } + + self.body = self.body.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Try<'a> { + pub body: Suite<'a>, + pub handlers: Vec>, + pub orelse: Option>, + pub finalbody: Option>, + pub leading_lines: Vec>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) try_tok: TokenRef<'a>, + // colon_tok unnecessary +} + +impl<'a> Codegen<'a> for Try<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + state.add_token("try"); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + for h in &self.handlers { + h.codegen(state); + } + if let Some(e) = &self.orelse { + e.codegen(state); + } + if let Some(f) = &self.finalbody { + f.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for Try<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.try_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_before_colon = + parse_simple_whitespace(config, &mut (*self.try_tok).whitespace_after.borrow_mut())?; + self.body = self.body.inflate(config)?; + self.handlers = self.handlers.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + self.finalbody = self.finalbody.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct AugAssign<'a> { + pub target: AssignTargetExpression<'a>, + pub operator: AugOp<'a>, + pub value: Expression<'a>, + pub semicolon: Option>, +} + +impl<'a> Inflate<'a> for AugAssign<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.target = self.target.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for AugAssign<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.target.codegen(state); + self.operator.codegen(state); + self.value.codegen(state); + + if let Some(s) = &self.semicolon { + s.codegen(state); + } + } +} + +impl<'a> AugAssign<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct WithItem<'a> { + pub item: Expression<'a>, + pub asname: Option>, + pub comma: Option>, +} + +impl<'a> Codegen<'a> for WithItem<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.item.codegen(state); + if let Some(n) = &self.asname { + n.codegen(state); + } + if let Some(c) = &self.comma { + c.codegen(state); + } + } +} + +impl<'a> WithComma<'a> for WithItem<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + Self { + comma: Some(comma), + ..self + } + } +} + +impl<'a> Inflate<'a> for WithItem<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.item = self.item.inflate(config)?; + self.asname = 
self.asname.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct With<'a> { + pub items: Vec>, + pub body: Suite<'a>, + pub asynchronous: Option>, + pub leading_lines: Vec>, + pub whitespace_after_with: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) async_tok: Option>, + pub(crate) with_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for With<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + + if let Some(asy) = &self.asynchronous { + asy.codegen(state); + } + state.add_token("with"); + self.whitespace_after_with.codegen(state); + let len = self.items.len(); + for (i, item) in self.items.iter().enumerate() { + item.codegen(state); + if item.comma.is_none() && i + 1 < len { + state.add_token(", "); + } + } + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for With<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { + let whitespace_after = + parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; + ( + Some(Asynchronous { whitespace_after }), + Some(parse_empty_lines( + config, + &mut asy.whitespace_before.borrow_mut(), + None, + )?), + ) + } else { + (None, None) + }; + + self.asynchronous = asynchronous; + + self.leading_lines = if let Some(ll) = leading_lines { + ll + } else { + parse_empty_lines( + config, + &mut (*self.with_tok).whitespace_before.borrow_mut(), + None, + )? + }; + + self.whitespace_after_with = + parse_simple_whitespace(config, &mut (*self.with_tok).whitespace_after.borrow_mut())?; + self.items = self.items.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut (*self.colon_tok).whitespace_before.borrow_mut(), + )?; + self.body = self.body.inflate(config)?; + + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate, IntoPy)] +pub enum DelTargetExpression<'a> { + Name(Name<'a>), + Attribute(Attribute<'a>), + Tuple(Tuple<'a>), + List(List<'a>), + Subscript(Subscript<'a>), +} + +impl<'a> std::convert::From> for Expression<'a> { + fn from(d: DelTargetExpression<'a>) -> Self { + match d { + DelTargetExpression::Attribute(a) => Expression::Attribute(a), + DelTargetExpression::List(l) => Expression::List(l), + DelTargetExpression::Name(n) => Expression::Name(n), + DelTargetExpression::Subscript(s) => Expression::Subscript(s), + DelTargetExpression::Tuple(t) => Expression::Tuple(t), + } + } +} +impl<'a> std::convert::From> for Element<'a> { + fn from(d: DelTargetExpression<'a>) -> Element { + Element::Simple { + value: d.into(), + comma: None, + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Del<'a> { + pub target: DelTargetExpression<'a>, + pub whitespace_after_del: SimpleWhitespace<'a>, + pub semicolon: Option>, + + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for Del<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_del = + parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; + self.target = self.target.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) + } +} + +impl<'a> Codegen<'a> 
for Del<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("del"); + self.whitespace_after_del.codegen(state); + self.target.codegen(state); + if let Some(semi) = &self.semicolon { + semi.codegen(state); + } + } +} + +impl<'a> Del<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { + Self { semicolon, ..self } + } +} diff --git a/native/libcst/src/nodes/test_utils.rs b/native/libcst/src/nodes/test_utils.rs new file mode 100644 index 00000000..6a462c8d --- /dev/null +++ b/native/libcst/src/nodes/test_utils.rs @@ -0,0 +1,42 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use pyo3::prelude::*; + +py_import!("libcst._nodes.deep_equals", "deep_equals", get_deep_equals); + +pub fn repr_or_panic(py: Python, value: T) -> String +where + T: ToPyObject, +{ + value + .to_object(py) + .as_ref(py) + .repr() + .expect("failed to call repr") + .extract() + .expect("repr should've returned str") +} + +pub fn py_assert_deep_equals(py: Python, left: L, right: R) +where + L: ToPyObject, + R: ToPyObject, +{ + let (left, right) = (left.to_object(py), right.to_object(py)); + let equals = get_deep_equals(py) + .expect("failed to import deep_equals") + .call1((&left, &right)) + .expect("failed to call deep_equals") + .extract::() + .expect("deep_equals should return a bool"); + if !equals { + panic!( + "assertion failed: {} was not deeply equal to {}", + repr_or_panic(py, &left), + repr_or_panic(py, &right), + ); + } +} diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs new file mode 100644 index 00000000..155ae17a --- /dev/null +++ b/native/libcst/src/nodes/traits.rs @@ -0,0 +1,62 @@ +use crate::{ + tokenizer::whitespace_parser::{Config, WhitespaceError}, + Codegen, CodegenState, Comma, EmptyLine, LeftParen, RightParen, +}; + +pub trait WithComma<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self; +} + +pub trait ParenthesizedNode<'a> { + fn lpar(&self) -> &Vec>; + fn rpar(&self) -> &Vec>; + + fn parenthesize(&self, state: &mut CodegenState<'a>, f: F) + where + F: FnOnce(&mut CodegenState<'a>), + { + for lpar in self.lpar() { + lpar.codegen(state); + } + f(state); + for rpar in self.rpar() { + rpar.codegen(state); + } + } + + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self; +} + +pub trait WithLeadingLines<'a> { + fn leading_lines(&mut self) -> &mut Vec>; +} + +pub type Result = std::result::Result; + +pub trait Inflate<'a> +where + Self: Sized, +{ + fn inflate(self, config: &Config<'a>) -> Result; +} + +impl<'a, T: Inflate<'a>> Inflate<'a> for Option { + fn inflate(self, config: &Config<'a>) -> Result { + self.map(|x| x.inflate(config)).transpose() + } +} + +impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box { + fn inflate(self, config: &Config<'a>) -> Result { + match (*self).inflate(config) { + Ok(a) => Ok(Box::new(a)), + Err(e) => Err(e), + } + } +} + +impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { + fn inflate(self, config: &Config<'a>) -> Result { + self.into_iter().map(|item| item.inflate(config)).collect() + } +} diff --git a/native/libcst/src/nodes/whitespace.rs b/native/libcst/src/nodes/whitespace.rs new file mode 100644 index 00000000..89f2800e --- /dev/null +++ b/native/libcst/src/nodes/whitespace.rs @@ -0,0 +1,167 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use libcst_derive::IntoPy; + +use super::{Codegen, CodegenState}; + +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +pub struct SimpleWhitespace<'a>(pub &'a str); + +impl<'a> Codegen<'a> for SimpleWhitespace<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token(self.0); + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub struct Comment<'a>(pub &'a str); + +impl<'a> Default for Comment<'a> { + fn default() -> Self { + Self("#") + } +} + +impl<'a> Codegen<'a> for Comment<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token(self.0); + } +} + +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +pub struct Newline<'a>(pub Option<&'a str>, pub Fakeness); + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Fakeness { + Fake, + Real, +} + +impl Default for Fakeness { + fn default() -> Self { + Self::Real + } +} + +impl<'a> Codegen<'a> for Newline<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + if let Fakeness::Fake = self.1 { + return; + } + if let Some(value) = self.0 { + state.add_token(value); + } else { + state.add_token(state.default_newline); + } + } +} + +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +pub struct TrailingWhitespace<'a> { + pub whitespace: SimpleWhitespace<'a>, + pub comment: Option>, + pub newline: Newline<'a>, +} + +impl<'a> Codegen<'a> for TrailingWhitespace<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace.codegen(state); + if let Some(comment) = &self.comment { + comment.codegen(state); + } + self.newline.codegen(state); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, IntoPy)] +pub struct EmptyLine<'a> { + pub indent: bool, + pub whitespace: SimpleWhitespace<'a>, + pub comment: Option>, + pub newline: Newline<'a>, +} + +impl<'a> Codegen<'a> for EmptyLine<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + if self.indent { + state.add_indent() + } + self.whitespace.codegen(state); + if let Some(comment) = &self.comment { + comment.codegen(state); + } + self.newline.codegen(state); + } +} + +impl<'a> Default for EmptyLine<'a> { + fn default() -> Self { + Self { + indent: true, + whitespace: Default::default(), + comment: Default::default(), + newline: Default::default(), + } + } +} + +impl<'a> EmptyLine<'a> { + pub fn new( + indent: bool, + whitespace: SimpleWhitespace<'a>, + comment: Option>, + newline: Newline<'a>, + ) -> Self { + Self { + indent, + whitespace, + comment, + newline, + } + } +} + +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +pub struct ParenthesizedWhitespace<'a> { + pub first_line: TrailingWhitespace<'a>, + pub empty_lines: Vec>, + pub indent: bool, + pub last_line: SimpleWhitespace<'a>, +} + +impl<'a> Codegen<'a> for ParenthesizedWhitespace<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.first_line.codegen(state); + for line in &self.empty_lines { + line.codegen(state); + } + if self.indent { + state.add_indent() + } + self.last_line.codegen(state); + } +} + +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +pub enum ParenthesizableWhitespace<'a> { + SimpleWhitespace(SimpleWhitespace<'a>), + ParenthesizedWhitespace(ParenthesizedWhitespace<'a>), +} + +impl<'a> Codegen<'a> for ParenthesizableWhitespace<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + match self { + Self::SimpleWhitespace(w) => w.codegen(state), + Self::ParenthesizedWhitespace(w) => 
w.codegen(state), + } + } +} + +impl<'a> Default for ParenthesizableWhitespace<'a> { + fn default() -> Self { + Self::SimpleWhitespace(SimpleWhitespace("")) + } +} diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs new file mode 100644 index 00000000..641d9630 --- /dev/null +++ b/native/libcst/src/parser/errors.rs @@ -0,0 +1,78 @@ +use pyo3::types::{IntoPyDict, PyModule}; +use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; + +use crate::parser::grammar::TokVec; +use crate::tokenizer::whitespace_parser::WhitespaceError; +use crate::tokenizer::TokError; +use peg::Parse; +use thiserror::Error; + +#[allow(clippy::enum_variant_names)] +#[derive(Debug, Error, PartialEq, Eq)] +pub enum ParserError<'a> { + #[error("tokenizer error: {0}")] + TokenizerError(TokError<'a>, &'a str), + #[error("parser error: {0}")] + ParserError( + peg::error::ParseError< as Parse>::PositionRepr>, + &'a str, + ), + #[error(transparent)] + WhitespaceError(#[from] WhitespaceError), + #[error("invalid operator")] + OperatorError, +} + +impl<'a> From> for PyErr { + fn from(e: ParserError) -> Self { + Python::with_gil(|py| { + let lines = match &e { + ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => { + text.lines().collect::>() + } + _ => vec![""], + }; + let (line, col) = match &e { + ParserError::ParserError(err, ..) => { + (err.location.start_pos.line, err.location.start_pos.column) + } + _ => (0, 0), + }; + let kwargs = [ + ("message", e.to_string().into_py(py)), + ("lines", lines.into_py(py)), + ("raw_line", line.into_py(py)), + ("raw_column", col.into_py(py)), + ] + .into_py_dict(py); + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + PyErr::from_instance( + libcst + .getattr("ParserSyntaxError") + .expect("ParserSyntaxError not found") + .call((), Some(kwargs)) + .expect("failed to instantiate"), + ) + }) + } +} + +struct Details { + message: String, + lines: Vec, + raw_line: u32, + raw_column: u32, +} + +impl<'a> PyErrArguments for Details { + fn arguments(self, py: pyo3::Python) -> pyo3::PyObject { + [ + ("message", self.message.into_py(py)), + ("lines", self.lines.into_py(py)), + ("raw_line", self.raw_line.into_py(py)), + ("raw_column", self.raw_column.into_py(py)), + ] + .into_py_dict(py) + .into_py(py) + } +} diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs new file mode 100644 index 00000000..f79594f2 --- /dev/null +++ b/native/libcst/src/parser/grammar.rs @@ -0,0 +1,2993 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
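+// A PEG grammar (built with the `peg` crate's parser! macro) over the tokenizer's
+// token stream. Each rule constructs CST nodes directly; whitespace is parsed from
+// the attached tokens afterwards by the Inflate pass defined in nodes/traits.rs.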
+ +use std::rc::Rc; + +use crate::nodes::*; +use crate::parser::ParserError; +use crate::tokenizer::{TokType, Token}; +use peg::str::LineCol; +use peg::{parser, Parse, ParseElem, RuleResult}; +use TokType::{ + Async, Await as AWAIT, Dedent, EndMarker, FStringEnd, FStringStart, FStringString, Indent, + Name as NameTok, Newline as NL, Number, String as STRING, +}; + +pub type Result<'a, T> = std::result::Result>; + +#[derive(Debug)] +pub struct TokVec<'a>(Vec>>); + +impl<'a> std::convert::From>> for TokVec<'a> { + fn from(vec: Vec>) -> Self { + TokVec(vec.into_iter().map(Rc::new).collect()) + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ParseLoc { + pub start_pos: LineCol, + pub end_pos: LineCol, +} + +impl std::fmt::Display for ParseLoc { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.start_pos.fmt(f) + } +} + +impl<'a> Parse for TokVec<'a> { + type PositionRepr = ParseLoc; + + fn start(&self) -> usize { + 0 + } + + fn is_eof(&self, pos: usize) -> bool { + pos >= self.0.len() + } + + fn position_repr(&self, pos: usize) -> Self::PositionRepr { + let tok = self.0.get(pos).unwrap_or_else(|| self.0.last().unwrap()); + ParseLoc { + start_pos: LineCol { + line: tok.start_pos.line_number(), + column: tok.start_pos.char_column_number(), + offset: tok.start_pos.byte_idx(), + }, + end_pos: LineCol { + line: tok.end_pos.line_number(), + column: tok.end_pos.char_column_number(), + offset: tok.end_pos.byte_idx(), + }, + } + } +} + +type TokenRef<'a> = Rc>; + +impl<'a> ParseElem for TokVec<'a> { + type Element = TokenRef<'a>; + + fn parse_elem(&self, pos: usize) -> RuleResult { + match self.0.get(pos) { + Some(tok) => RuleResult::Matched(pos + 1, tok.clone()), + None => RuleResult::Failed, + } + } +} + +parser! { + pub grammar python<'a>(input: &'a str) for TokVec<'a> { + + // Starting Rules + + pub rule file(encoding: Option<&str>) -> Module<'a> + = traced(<_file(encoding.unwrap_or("utf-8"))>) + + pub rule expression_input() -> Expression<'a> + = traced() + + pub rule statement_input() -> Statement<'a> + = traced() + + rule _file(encoding: &str) -> Module<'a> + = s:statements()? 
eof:tok(EndMarker, "EOF") { + make_module(s.unwrap_or_default(), eof, encoding) + } + + // General statements + + rule statements() -> Vec> + = statement()+ + + rule statement() -> Statement<'a> + = c:compound_stmt() { Statement::Compound(c) } + / s:simple_stmts() { + Statement::Simple(make_simple_statement_line(s)) + } + + rule simple_stmts() -> SimpleStatementParts<'a> + = first_tok:&_ stmts:separated_trailer(, ) nl:tok(NL, "NEWLINE") { + SimpleStatementParts { + first_tok, + first_statement: stmts.0, + rest: stmts.1, + last_semi: stmts.2, + nl, + } + } + + #[cache] + rule simple_stmt() -> SmallStatement<'a> + = assignment() + / e:star_expressions() { SmallStatement::Expr(Expr { value: e, semicolon: None }) } + / &lit("return") s:return_stmt() { SmallStatement::Return(s) } + // this is expanded from the original grammar's import_stmt rule + / &lit("import") i:import_name() { SmallStatement::Import(i) } + / &lit("from") i:import_from() { SmallStatement::ImportFrom(i) } + / &lit("raise") r:raise_stmt() { SmallStatement::Raise(r) } + / lit("pass") { SmallStatement::Pass(Pass { semicolon: None }) } + / &lit("del") s:del_stmt() { SmallStatement::Del(s) } + / &lit("yield") s:yield_stmt() { SmallStatement::Expr(Expr { value: s, semicolon: None }) } + / &lit("assert") s:assert_stmt() {SmallStatement::Assert(s)} + / lit("break") { SmallStatement::Break(Break { semicolon: None })} + / lit("continue") { SmallStatement::Continue(Continue { semicolon: None })} + / &lit("global") s:global_stmt() {SmallStatement::Global(s)} + / &lit("nonlocal") s:nonlocal_stmt() {SmallStatement::Nonlocal(s)} + + + rule compound_stmt() -> CompoundStatement<'a> + = &(lit("def") / lit("@") / tok(Async, "ASYNC")) f:function_def() { + CompoundStatement::FunctionDef(f) + } + / &lit("if") f:if_stmt() { CompoundStatement::If(f) } + / &(lit("class") / lit("@")) c:class_def() { CompoundStatement::ClassDef(c) } + / &(lit("with") / tok(Async, "ASYNC")) w:with_stmt() { CompoundStatement::With(w) } + / &(lit("for") / tok(Async, "ASYNC")) f:for_stmt() { CompoundStatement::For(f) } + / &lit("try") t:try_stmt() { CompoundStatement::Try(t) } + / &lit("while") w:while_stmt() { CompoundStatement::While(w) } + + // Simple statements + + rule assignment() -> SmallStatement<'a> + = a:name() col:lit(":") ann:expression() + rhs:(eq:lit("=") d:annotated_rhs() {(eq, d)})? { + SmallStatement::AnnAssign( + make_ann_assignment(AssignTargetExpression::Name(a), col, ann, rhs)) + } + // TODO: there's an extra '(' single_target ')' clause here in upstream + / a:single_subscript_attribute_target() col:lit(":") ann:expression() + rhs:(eq:lit("=") d:annotated_rhs() {(eq, d)})? { + SmallStatement::AnnAssign(make_ann_assignment(a, col, ann, rhs)) + } + / lhs:(t:star_targets() eq:lit("=") {(t, eq)})+ rhs:(yield_expr() / star_expressions()) !lit("=") { + SmallStatement::Assign(make_assignment(lhs, rhs)) + } + / t:single_target() op:augassign() rhs:(yield_expr() / star_expressions()) { + SmallStatement::AugAssign(make_aug_assign(t, op, rhs)) + } + + rule annotated_rhs() -> Expression<'a> + = yield_expr() / star_expressions() + + rule augassign() -> AugOp<'a> + = &(lit("+=") + / lit("-=") + / lit("*=") + / lit("@=") + / lit("/=") + / lit("%=") + / lit("&=") + / lit("|=") + / lit("^=") + / lit("<<=") + / lit(">>=") + / lit("**=") + / lit("//=")) tok:_ {? + make_aug_op(tok).map_err(|_| "aug_op") + } + + rule return_stmt() -> Return<'a> + = kw:lit("return") a:star_expressions()? 
{ + make_return(kw, a) + } + + rule raise_stmt() -> Raise<'a> + = kw:lit("raise") exc:expression() + rest:(f:lit("from") cau:expression() {(f, cau)})? { + make_raise(kw, Some(exc), rest) + } + / kw:lit("raise") { + make_raise(kw, None, None) + } + + rule global_stmt() -> Global<'a> + = kw:lit("global") init:(n:name() c:comma() {(n, c)})* last:name() { + make_global(kw, init, last) + } + + rule nonlocal_stmt() -> Nonlocal<'a> + = kw:lit("nonlocal") init:(n:name() c:comma() {(n, c)})* last:name() { + make_nonlocal(kw, init, last) + } + + rule del_stmt() -> Del<'a> + = kw:lit("del") t:del_target() &(lit(";") / tok(NL, "NEWLINE")) { + make_del(kw, t) + } + / kw:lit("del") t:del_targets() &(lit(";") / tok(NL, "NEWLINE")) { + make_del(kw, make_del_tuple(None, t, None)) + } + + rule yield_stmt() -> Expression<'a> + = yield_expr() + + rule assert_stmt() -> Assert<'a> + = kw:lit("assert") test:expression() rest:(c:comma() msg:expression() {(c, msg)})? { + make_assert(kw, test, rest) + } + + // Import statements + + rule import_name() -> Import<'a> + = kw:lit("import") a:dotted_as_names() { + make_import(kw, a) + } + + rule import_from() -> ImportFrom<'a> + = from:lit("from") dots:dots()? m:dotted_name() + import:lit("import") als:import_from_targets() { + make_import_from(from, dots.unwrap_or_default(), Some(m), import, als) + } + / from:lit("from") dots:dots() + import:lit("import") als:import_from_targets() { + make_import_from(from, dots, None, import, als) + } + + rule import_from_targets() -> ParenthesizedImportNames<'a> + = lpar:lpar() als:import_from_as_names() c:comma()? rpar:rpar() { + let mut als = als; + if let (comma@Some(_), Some(mut last)) = (c, als.last_mut()) { + last.comma = comma; + } + (Some(lpar), ImportNames::Aliases(als), Some(rpar)) + } + / als:import_from_as_names() !lit(",") { (None, ImportNames::Aliases(als), None)} + / star:lit("*") { (None, ImportNames::Star(ImportStar {}), None) } + + rule import_from_as_names() -> Vec> + = items:separated(, ) { + make_import_from_as_names(items.0, items.1) + } + + rule import_from_as_name() -> ImportAlias<'a> + = n:name() asname:(kw:lit("as") z:name() {(kw, z)})? { + make_import_alias(NameOrAttribute::N(n), asname) + } + + rule dotted_as_names() -> Vec> + = init:(d:dotted_as_name() c:comma() {d.with_comma(c)})* + last:dotted_as_name() { + concat(init, vec![last]) + } + + rule dotted_as_name() -> ImportAlias<'a> + = n:dotted_name() asname:(kw:lit("as") z:name() {(kw, z)})? { + make_import_alias(n, asname) + } + + // TODO: why does this diverge from CPython? + rule dotted_name() -> NameOrAttribute<'a> + = first:name() tail:(dot:lit(".") n:name() {(dot, n)})* { + make_name_or_attr(first, tail) + } + + // Compound statements + + // Common elements + + #[cache] + rule block() -> Suite<'a> + = n:tok(NL, "NEWLINE") ind:tok(Indent, "INDENT") s:statements() ded:tok(Dedent, "DEDENT") { + make_indented_block(n, ind, s, ded) + } + / s:simple_stmts() { + make_simple_statement_suite(s) + } + + rule decorators() -> Vec> + = (at:lit("@") e:named_expression() nl:tok(NL, "NEWLINE") { + make_decorator(at, e, nl) + } )+ + + // Class definitions + + rule class_def() -> ClassDef<'a> + = d:decorators() c:class_def_raw() { c.with_decorators(d) } + / class_def_raw() + + rule class_def_raw() -> ClassDef<'a> + = kw:lit("class") n:name() arg:(l:lpar() a:arguments()? r:rpar() {(l, a, r)})? + col:lit(":") b:block() {? 
+ make_class_def(kw, n, arg, col, b) + } + + // Function definitions + + rule function_def() -> FunctionDef<'a> + = d:decorators() f:function_def_raw() {f.with_decorators(d)} + / function_def_raw() + + rule _returns() -> Annotation<'a> + = l:lit("->") e:expression() { + make_annotation(l, e) + } + + rule function_def_raw() -> FunctionDef<'a> + = def:lit("def") n:name() op:lit("(") params:params()? + cp:lit(")") ty:_returns()? c:lit(":") b:block() { + make_function_def(None, def, n, op, params, cp, ty, c, b) + } + / asy:tok(Async, "ASYNC") def:lit("def") n:name() op:lit("(") params:params()? + cp:lit(")") ty:_returns()? c:lit(":") b:block() { + make_function_def(Some(asy), def, n, op, params, cp, ty, c, b) + } + + // Function parameters + + rule params() -> Parameters<'a> + = parameters() + + rule parameters() -> Parameters<'a> + = a:slash_no_default() b:param_no_default()* c:param_with_default()* d:star_etc()? { + make_parameters(Some(a), concat(b, c), d) + } + / a:slash_with_default() b:param_with_default()* d:star_etc()? { + make_parameters(Some(a), b, d) + } + / a:param_no_default()+ b:param_with_default()* d:star_etc()? { + make_parameters(None, concat(a, b), d) + } + / a:param_with_default()+ d:star_etc()? { + make_parameters(None, a, d) + } + / d:star_etc() { + make_parameters(None, vec![], Some(d)) + } + + rule slash_no_default() -> (Vec>, ParamSlash<'a>) + = a:param_no_default()+ slash:lit("/") com:comma() { + (a, ParamSlash { comma: Some(com)}) + } + / a:param_no_default()+ slash:lit("/") &lit(")") { + (a, ParamSlash { comma: None }) + } + + rule slash_with_default() -> (Vec>, ParamSlash<'a>) + = a:param_no_default()* b:param_with_default()+ slash:lit("/") c:comma() { + (concat(a, b), ParamSlash { comma: Some(c) }) + } + / a:param_no_default()* b:param_with_default()+ slash:lit("/") &lit(")") { + (concat(a, b), ParamSlash { comma: None }) + } + + rule star_etc() -> StarEtc<'a> + = star:lit("*") a:param_no_default() b:param_maybe_default()* kw:kwds()? { + StarEtc(Some(StarArg::Param(Box::new( + add_param_star(a, star)))), b, kw) + } + / lit("*") c:comma() b:param_maybe_default()+ kw:kwds()? { + StarEtc(Some(StarArg::Star(ParamStar {comma:c })), b, kw) + } + / kw:kwds() { StarEtc(None, vec![], Some(kw)) } + + rule kwds() -> Param<'a> + = star:lit("**") a:param_no_default() { + add_param_star(a, star) + } + + rule param_no_default() -> Param<'a> + = a:param() c:lit(",") { add_param_default(a, None, Some(c)) } + / a:param() &lit(")") {a} + + rule param_with_default() -> Param<'a> + = a:param() def:default() c:lit(",") { + add_param_default(a, Some(def), Some(c)) + } + / a:param() def:default() &lit(")") { + add_param_default(a, Some(def), None) + } + + rule param_maybe_default() -> Param<'a> + = a:param() def:default()? c:lit(",") { + add_param_default(a, def, Some(c)) + } + / a:param() def:default()? &lit(")") { + add_param_default(a, def, None) + } + + rule param() -> Param<'a> + = n:name() a:annotation()? { + Param {name: n, annotation: a, ..Default::default() } + } + + rule annotation() -> Annotation<'a> + = col:lit(":") e:expression() { + make_annotation(col, e) + } + + rule default() -> (AssignEqual<'a>, Expression<'a>) + = eq:lit("=") ex:expression() { + (make_assign_equal(eq), ex) + } + + // If statement + + rule if_stmt() -> If<'a> + = i:lit("if") a:named_expression() col:lit(":") b:block() elif:elif_stmt() { + make_if(i, a, col, b, Some(OrElse::Elif(elif)), false) + } + / i:lit("if") a:named_expression() col:lit(":") b:block() el:else_block()? 
{ + make_if(i, a, col, b, el.map(OrElse::Else), false) + } + + rule elif_stmt() -> If<'a> + = i:lit("elif") a:named_expression() col:lit(":") b:block() elif:elif_stmt() { + make_if(i, a, col, b, Some(OrElse::Elif(elif)), true) + } + / i:lit("elif") a:named_expression() col:lit(":") b:block() el:else_block()? { + make_if(i, a, col, b, el.map(OrElse::Else), true) + } + + rule else_block() -> Else<'a> + = el:lit("else") col:lit(":") b:block() { + make_else(el, col, b) + } + + // While statement + + rule while_stmt() -> While<'a> + = kw:lit("while") test:named_expression() col:lit(":") b:block() el:else_block()? { + make_while(kw, test, col, b, el) + } + + // For statement + + rule for_stmt() -> For<'a> + = f:lit("for") t:star_targets() i:lit("in") it:star_expressions() + c:lit(":") b:block() el:else_block()? { + make_for(None, f, t, i, it, c, b, el) + } + / asy:tok(Async, "ASYNC") f:lit("for") t:star_targets() i:lit("in") + it:star_expressions() + c:lit(":") b:block() el:else_block()? { + make_for(Some(asy), f, t, i, it, c, b, el) + } + + // With statement + + rule with_stmt() -> With<'a> + = kw:lit("with") items:separated(, ) + col:lit(":") b:block() { + make_with(None, kw, comma_separate(items.0, items.1, None), col, b) + } + / asy:tok(Async, "ASYNC") kw:lit("with") items:separated(, ) + col:lit(":") b:block() { + make_with(Some(asy), kw, comma_separate(items.0, items.1, None), col, b) + } + + rule with_item() -> WithItem<'a> + = e:expression() a:lit("as") t:star_target() &(lit(",") / lit(":")) { + make_with_item(e, Some(a), Some(t)) + } + / e:expression() { + make_with_item(e, None, None) + } + + // Try statement + + rule try_stmt() -> Try<'a> + = kw:lit("try") lit(":") b:block() f:finally_block() { + make_try(kw, b, vec![], None, Some(f)) + } + / kw:lit("try") lit(":") b:block() ex:except_block()+ el:else_block()? + f:finally_block()? { + make_try(kw, b, ex, el, f) + } + + // Except statement + + rule except_block() -> ExceptHandler<'a> + = kw:lit("except") e:expression() a:(k:lit("as") n:name() {(k, n)})? + col:lit(":") b:block() { + make_except(kw, Some(e), a, col, b) + } + / kw:lit("except") col:lit(":") b:block() { + make_except(kw, None, None, col, b) + } + + rule finally_block() -> Finally<'a> + = kw:lit("finally") col:lit(":") b:block() { + make_finally(kw, col, b) + } + + + // Expressions + + #[cache] + rule expression() -> Expression<'a> + = _conditional_expression() + / lambdef() + + rule _conditional_expression() -> Expression<'a> + = body:disjunction() i:lit("if") test:disjunction() e:lit("else") oe:expression() { + Expression::IfExp(make_ifexp(body, i, test, e, oe)) + } + / disjunction() + + rule yield_expr() -> Expression<'a> + = y:lit("yield") f:lit("from") a:expression() { + Expression::Yield(make_yield(y, Some(f), Some(a))) + } + / y:lit("yield") a:star_expressions()? { + Expression::Yield(make_yield(y, None, a)) + } + + rule star_expressions() -> Expression<'a> + = first:star_expression() + rest:(comma:comma() e:star_expression() { (comma, expr_to_element(e)) })+ + comma:comma()? 
{ + Expression::Tuple(make_tuple(expr_to_element(first), rest, comma, None, None)) + } + / e:star_expression() comma:comma() { + Expression::Tuple(make_tuple(expr_to_element(e), vec![], Some(comma), None, None)) + } + / star_expression() + + #[cache] + rule star_expression() -> Expression<'a> + = star:lit("*") e:bitwise_or() { + Expression::StarredElement(make_starred_element(star, expr_to_element(e))) + } + / expression() + + rule star_named_expressions() -> Vec> + = exps:separated_trailer(, ) { + comma_separate(exps.0, exps.1, exps.2) + } + + rule star_named_expression() -> Element<'a> + = star:lit("*") e:bitwise_or() { + Element::Starred(make_starred_element(star, expr_to_element(e))) + } + / e:named_expression() { expr_to_element(e) } + + rule named_expression() -> Expression<'a> + = a:name() op:lit(":=") b:expression() { + Expression::NamedExpr(make_named_expr(a, op, b)) + } + / e:expression() !lit(":=") { e } + + #[cache] + rule disjunction() -> Expression<'a> + = a:conjunction() b:(or:lit("or") inner:conjunction() { (or, inner) })+ {? + make_boolean_op(a, b).map_err(|e| "expected disjunction") + } + / conjunction() + + #[cache] + rule conjunction() -> Expression<'a> + = a:inversion() b:(and:lit("and") inner:inversion() { (and, inner) })+ {? + make_boolean_op(a, b).map_err(|e| "expected conjunction") + } + / inversion() + + #[cache] + rule inversion() -> Expression<'a> + = not:lit("not") a:inversion() {? + make_unary_op(not, a).map_err(|e| "expected inversion") + } + / comparison() + + // Comparison operators + + #[cache] + rule comparison() -> Expression<'a> + = a:bitwise_or() b:compare_op_bitwise_or_pair()+ { make_comparison(a, b) } + / bitwise_or() + + // This implementation diverges slightly from CPython (3.9) to avoid bloating + // the parser cache and increase readability. + #[cache] + rule compare_op_bitwise_or_pair() -> (CompOp<'a>, Expression<'a>) + = _op_bitwise_or("==") + / _op_bitwise_or("!=") // TODO: support barry_as_flufl + / _op_bitwise_or("<=") + / _op_bitwise_or("<") + / _op_bitwise_or(">=") + / _op_bitwise_or(">") + / _op_bitwise_or2("not", "in") + / _op_bitwise_or("in") + / _op_bitwise_or2("is", "not") + / _op_bitwise_or("is") + + rule _op_bitwise_or(o: &'static str) -> (CompOp<'a>, Expression<'a>) + = op:lit(o) e:bitwise_or() {? + make_comparison_operator(op) + .map(|op| (op, e)) + .map_err(|_| "comparison") + } + + rule _op_bitwise_or2(first: &'static str, second: &'static str) -> (CompOp<'a>, Expression<'a>) + = f:lit(first) s:lit(second) e:bitwise_or() {? + make_comparison_operator_2(f, s) + .map(|op| (op, e)) + .map_err(|_| "comparison") + } + + #[cache_left_rec] + rule bitwise_or() -> Expression<'a> + = a:bitwise_or() op:lit("|") b:bitwise_xor() {? + make_binary_op(a, op, b).map_err(|e| "expected bitwise_or") + } + / bitwise_xor() + + #[cache_left_rec] + rule bitwise_xor() -> Expression<'a> + = a:bitwise_xor() op:lit("^") b:bitwise_and() {? + make_binary_op(a, op, b).map_err(|e| "expected bitwise_xor") + } + / bitwise_and() + + #[cache_left_rec] + rule bitwise_and() -> Expression<'a> + = a:bitwise_and() op:lit("&") b:shift_expr() {? + make_binary_op(a, op, b).map_err(|e| "expected bitwise_and") + } + / shift_expr() + + #[cache_left_rec] + rule shift_expr() -> Expression<'a> + = a:shift_expr() op:lit("<<") b:sum() {? + make_binary_op(a, op, b).map_err(|e| "expected shift_expr") + } + / a:shift_expr() op:lit(">>") b:sum() {? 
+ make_binary_op(a, op, b).map_err(|e| "expected shift_expr") + } + / sum() + + #[cache_left_rec] + rule sum() -> Expression<'a> + = a:sum() op:lit("+") b:term() {? + make_binary_op(a, op, b).map_err(|e| "expected sum") + } + / a:sum() op:lit("-") b:term() {? + make_binary_op(a, op, b).map_err(|e| "expected sum") + } + / term() + + #[cache_left_rec] + rule term() -> Expression<'a> + = a:term() op:lit("*") b:factor() {? + make_binary_op(a, op, b).map_err(|e| "expected term") + } + / a:term() op:lit("/") b:factor() {? + make_binary_op(a, op, b).map_err(|e| "expected term") + } + / a:term() op:lit("//") b:factor() {? + make_binary_op(a, op, b).map_err(|e| "expected term") + } + / a:term() op:lit("%") b:factor() {? + make_binary_op(a, op, b).map_err(|e| "expected term") + } + / a:term() op:lit("@") b:factor() {? + make_binary_op(a, op, b).map_err(|e| "expected term") + } + / factor() + + #[cache] + rule factor() -> Expression<'a> + = op:lit("+") a:factor() {? + make_unary_op(op, a).map_err(|e| "expected factor") + } + / op:lit("-") a:factor() {? + make_unary_op(op, a).map_err(|e| "expected factor") + } + / op:lit("~") a:factor() {? + make_unary_op(op, a).map_err(|e| "expected factor") + } + / power() + + rule power() -> Expression<'a> + = a:await_primary() op:lit("**") b:factor() {? + make_binary_op(a, op, b).map_err(|e| "expected power") + } + / await_primary() + + // Primary elements + + rule await_primary() -> Expression<'a> + = aw:tok(AWAIT, "AWAIT") e:primary() { + Expression::Await(make_await(aw, e)) + } + / primary() + + #[cache_left_rec] + rule primary() -> Expression<'a> + = v:primary() dot:lit(".") attr:name() { + Expression::Attribute(make_attribute(v, dot, attr)) + } + / a:primary() b:genexp() { + Expression::Call(make_genexp_call(a, b)) + } + / f:primary() lpar:lit("(") arg:arguments()? rpar:lit(")") { + Expression::Call(make_call(f, lpar, arg.unwrap_or_default(), rpar)) + } + / v:primary() lbrak:lbrak() s:slices() rbrak:rbrak() { + Expression::Subscript(make_subscript(v, lbrak, s, rbrak)) + } + / atom() + + rule slices() -> Vec> + = s:slice() !lit(",") { vec![SubscriptElement { slice: s, comma: None }] } + / slices:separated_trailer(, ) { + make_slices(slices.0, slices.1, slices.2) + } + + rule slice() -> BaseSlice<'a> + = l:expression()? col:lit(":") u:expression()? + rest:(c:lit(":") s:expression()? {(c, s)})? { + make_slice(l, col, u, rest) + } + / v:expression() { make_index(v) } + + rule atom() -> Expression<'a> + = n:name() { Expression::Name(n) } + / n:lit("True") { Expression::Name(make_name(n)) } + / n:lit("False") { Expression::Name(make_name(n)) } + / n:lit("None") { Expression::Name(make_name(n)) } + / &(tok(STRING, "") / tok(FStringStart, "")) s:strings() {s.into()} + / n:tok(Number, "NUMBER") { make_number(n) } + / &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(g)})) {e} + / &lit("[") e:(list() / listcomp()) {e} + / &lit("{") e:(dict() / set() / dictcomp() / setcomp()) {e} + / lit("...") { Expression::Ellipsis(Ellipsis {lpar: vec![], rpar: vec![]})} + + rule group() -> Expression<'a> + = lpar:lpar() e:(yield_expr() / named_expression()) rpar:rpar() { + e.with_parens(lpar, rpar) + } + + // Lambda functions + + rule lambdef() -> Expression<'a> + = kw:lit("lambda") p:lambda_params()? c:lit(":") b:expression() { + Expression::Lambda(make_lambda(kw, p.unwrap_or_default(), c, b)) + } + + rule lambda_params() -> Parameters<'a> + = lambda_parameters() + + // lambda_parameters etc. 
duplicates parameters but without annotations or type + // comments, and if there's no comma after a parameter, we expect a colon, not a + // close parenthesis. + + rule lambda_parameters() -> Parameters<'a> + = a:lambda_slash_no_default() b:lambda_param_no_default()* + c:lambda_param_with_default()* d:lambda_star_etc()? { + make_parameters(Some(a), concat(b, c), d) + } + / a:lambda_slash_with_default() b:lambda_param_with_default()* + d:lambda_star_etc()? { + make_parameters(Some(a), b, d) + } + / a:lambda_param_no_default()+ b:lambda_param_with_default()* + d:lambda_star_etc()? { + make_parameters(None, concat(a, b), d) + } + / a:lambda_param_with_default()+ d:lambda_star_etc()? { + make_parameters(None, a, d) + } + / d:lambda_star_etc() { + make_parameters(None, vec![], Some(d)) + } + + rule lambda_slash_no_default() -> (Vec>, ParamSlash<'a>) + = a:lambda_param_no_default()+ slash:lit("/") com:comma() { + (a, ParamSlash { comma: Some(com) } ) + } + / a:lambda_param_no_default()+ slash:lit("/") &lit(":") { + (a, ParamSlash { comma: None }) + } + + rule lambda_slash_with_default() -> (Vec>, ParamSlash<'a>) + = a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") c:comma(){ + (concat(a, b), ParamSlash { comma: Some(c) }) + } + / a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") &lit(":") { + (concat(a, b), ParamSlash { comma: None }) + } + + rule lambda_star_etc() -> StarEtc<'a> + = star:lit("*") a:lambda_param_no_default() + b:lambda_param_maybe_default()* kw:lambda_kwds()? { + StarEtc(Some(StarArg::Param( + Box::new(add_param_star(a, star)) + )), b, kw) + } + / lit("*") c:comma() b:lambda_param_maybe_default()+ kw:lambda_kwds()? { + StarEtc(Some(StarArg::Star(ParamStar {comma: c})), b, kw) + } + / kw:lambda_kwds() { StarEtc(None, vec![], Some(kw)) } + + rule lambda_kwds() -> Param<'a> + = star:lit("**") a:lambda_param_no_default() { + add_param_star(a, star) + } + + rule lambda_param_no_default() -> Param<'a> + = a:lambda_param() c:lit(",") { + add_param_default(a, None, Some(c)) + } + / a:lambda_param() &lit(":") {a} + + rule lambda_param_with_default() -> Param<'a> + = a:lambda_param() def:default() c:lit(",") { + add_param_default(a, Some(def), Some(c)) + } + / a:lambda_param() def:default() &lit(":") { + add_param_default(a, Some(def), None) + } + + rule lambda_param_maybe_default() -> Param<'a> + = a:lambda_param() def:default()? c:lit(",") { + add_param_default(a, def, Some(c)) + } + / a:lambda_param() def:default()? &lit(":") { + add_param_default(a, def, None) + } + + rule lambda_param() -> Param<'a> + = name:name() { Param { name, ..Default::default() } } + + // Literals + + rule strings() -> String<'a> + = s:(str:tok(STRING, "STRING") t:&_ {(make_string(str), t)} + / str:fstring() t:&_ {(String::Formatted(str), t)})+ { + make_strings(s) + } + + rule list() -> Expression<'a> + = lbrak:lbrak() e:star_named_expressions()? rbrak:rbrak() { + Expression::List( + make_list(lbrak, e.unwrap_or_default(), rbrak) + ) + } + + rule tuple() -> Expression<'a> + = lpar:lpar() first:star_named_expression() &lit(",") + rest:(c:comma() e:star_named_expression() {(c, e)})* + trailing_comma:comma()? rpar:rpar() { + Expression::Tuple( + make_tuple(first, rest, trailing_comma, Some(lpar), Some(rpar)) + ) + } + / lpar:lpar() rpar:lit(")") { + Expression::Tuple(Tuple::default().with_parens( + lpar, RightParen { whitespace_before: Default::default(), rpar_tok: rpar } + ))} + + rule set() -> Expression<'a> + = lbrace:lbrace() e:star_named_expressions()? 
rbrace:rbrace() { + Expression::Set(make_set(lbrace, e.unwrap_or_default(), rbrace)) + } + + // Dicts + + rule dict() -> Expression<'a> + = lbrace:lbrace() els:double_starred_keypairs()? rbrace:rbrace() { + Expression::Dict(make_dict(lbrace, els.unwrap_or_default(), rbrace)) + } + + + rule double_starred_keypairs() -> Vec> + = pairs:separated_trailer(, ) { + make_double_starred_keypairs(pairs.0, pairs.1, pairs.2) + } + + rule double_starred_kvpair() -> DictElement<'a> + = s:lit("**") e:bitwise_or() { + DictElement::Starred(make_double_starred_element(s, e)) + } + / k:kvpair() { make_dict_element(k) } + + rule kvpair() -> (Expression<'a>, TokenRef<'a>, Expression<'a>) + = k:expression() colon:lit(":") v:expression() { (k, colon, v) } + + // Comprehensions & generators + + rule for_if_clauses() -> CompFor<'a> + = c:for_if_clause()+ { merge_comp_fors(c) } + + rule for_if_clause() -> CompFor<'a> + = asy:_async() f:lit("for") tgt:star_targets() i:lit("in") + iter:disjunction() ifs:_comp_if()* { + make_for_if(Some(asy), f, tgt, i, iter, ifs) + } + / f:lit("for") tgt:star_targets() i:lit("in") + iter:disjunction() ifs:_comp_if()* { + make_for_if(None, f, tgt, i, iter, ifs) + } + + rule _comp_if() -> CompIf<'a> + = kw:lit("if") cond:disjunction() { + make_comp_if(kw, cond) + } + + rule listcomp() -> Expression<'a> + = lbrak:lbrak() elt:named_expression() comp:for_if_clauses() rbrak:rbrak() { + Expression::ListComp(make_list_comp(lbrak, elt, comp, rbrak)) + } + + rule setcomp() -> Expression<'a> + = l:lbrace() elt:named_expression() comp:for_if_clauses() r:rbrace() { + Expression::SetComp(make_set_comp(l, elt, comp, r)) + } + + rule genexp() -> GeneratorExp<'a> + = lpar:lpar() g:_bare_genexp() rpar:rpar() { + g.with_parens(lpar, rpar) + } + + rule _bare_genexp() -> GeneratorExp<'a> + = elt:named_expression() comp:for_if_clauses() { + make_bare_genexp(elt, comp) + } + + rule dictcomp() -> Expression<'a> + = lbrace:lbrace() elt:kvpair() comp:for_if_clauses() rbrace:rbrace() { + Expression::DictComp(make_dict_comp(lbrace, elt, comp, rbrace)) + } + + // Function call arguments + + rule arguments() -> Vec> + = a:args() trail:comma()? &lit(")") {add_arguments_trailing_comma(a, trail)} + + rule args() -> Vec> + = first:_posarg() + rest:(c:comma() a:_posarg() {(c, a)})* + kw:(c:comma() k:kwargs() {(c, k)})? 
{ + let (trail, kw) = kw.map(|(x,y)| (Some(x), Some(y))).unwrap_or((None, None)); + concat( + comma_separate(first, rest, trail), + kw.unwrap_or_default(), + ) + } + / kwargs() + + rule _posarg() -> Arg<'a> + = a:(starred_expression() / e:named_expression() { make_arg(e) }) + !lit("=") { a } + + rule kwargs() -> Vec> + = sitems:separated(, ) + scomma:comma() + ditems:separated(, ) { + concat( + comma_separate(sitems.0, sitems.1, Some(scomma)), + comma_separate(ditems.0, ditems.1, None), + ) + } + / items:separated(, ) { + comma_separate(items.0, items.1, None) + } + / items:separated(, ) { + comma_separate(items.0, items.1, None) + } + + rule starred_expression() -> Arg<'a> + = star:lit("*") e:expression() { make_star_arg(star, e) } + + rule kwarg_or_starred() -> Arg<'a> + = _kwarg() + / starred_expression() + + rule kwarg_or_double_starred() -> Arg<'a> + = _kwarg() + / star:lit("**") e:expression() { make_star_arg(star, e) } + + rule _kwarg() -> Arg<'a> + = n:name() eq:lit("=") v:expression() { + make_kwarg(n, eq, v) + } + + // Assignment targets + // Generic targets + + rule star_targets() -> AssignTargetExpression<'a> + = a:star_target() !lit(",") {a} + / targets:separated_trailer(, ) { + AssignTargetExpression::Tuple( + make_tuple(targets.0, targets.1, targets.2, None, None) + ) + } + + rule star_targets_list_seq() -> Vec> + = targets:separated_trailer(, ) { + comma_separate(targets.0, targets.1, targets.2) + } + + // This differs from star_targets below because it requires at least two items + // in the tuple + rule star_targets_tuple_seq() -> Tuple<'a> + = first:(t:star_target() {assign_target_to_element(t)}) + rest:(c:comma() t:star_target() {(c, assign_target_to_element(t))})+ + trail:comma()? { + make_tuple(first, rest, trail, None, None) + } + / t:star_target() trail:comma()? { + make_tuple(assign_target_to_element(t), vec![], trail, None, None) + } + + #[cache] + rule star_target() -> AssignTargetExpression<'a> + = star:lit("*") !lit("*") t:star_target() { + AssignTargetExpression::StarredElement( + make_starred_element(star, assign_target_to_element(t)) + ) + } + / target_with_star_atom() + + #[cache] + rule target_with_star_atom() -> AssignTargetExpression<'a> + = a:t_primary() dot:lit(".") n:name() !t_lookahead() { + AssignTargetExpression::Attribute(make_attribute(a, dot, n)) + } + / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { + AssignTargetExpression::Subscript( + make_subscript(a, lbrak, s, rbrak) + ) + } + / a:star_atom() {a} + + rule star_atom() -> AssignTargetExpression<'a> + = a:name() { AssignTargetExpression::Name(a) } + / lpar:lpar() a:target_with_star_atom() rpar:rpar() { a.with_parens(lpar, rpar) } + / lpar:lpar() a:star_targets_tuple_seq()? rpar:rpar() { + AssignTargetExpression::Tuple( + a.unwrap_or_default().with_parens(lpar, rpar) + ) + } + / lbrak:lbrak() a:star_targets_list_seq()? 
rbrak:rbrak() { + AssignTargetExpression::List( + make_list(lbrak, a.unwrap_or_default(), rbrak) + ) + } + + rule single_target() -> AssignTargetExpression<'a> + = single_subscript_attribute_target() + / n:name() { AssignTargetExpression::Name(n) } + / lpar:lpar() t:single_target() rpar:rpar() { t.with_parens(lpar, rpar) } + + rule single_subscript_attribute_target() -> AssignTargetExpression<'a> + = a:t_primary() dot:lit(".") n:name() !t_lookahead() { + AssignTargetExpression::Attribute(make_attribute(a, dot, n)) + } + / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { + AssignTargetExpression::Subscript( + make_subscript(a, lbrak, s, rbrak) + ) + } + + + #[cache_left_rec] + rule t_primary() -> Expression<'a> + = value:t_primary() dot:lit(".") attr:name() &t_lookahead() { + Expression::Attribute(make_attribute(value, dot, attr)) + } + / v:t_primary() l:lbrak() s:slices() r:rbrak() &t_lookahead() { + Expression::Subscript(make_subscript(v, l, s, r)) + } + / f:t_primary() gen:genexp() &t_lookahead() { + Expression::Call(make_genexp_call(f, gen)) + } + / f:t_primary() lpar:lit("(") arg:arguments()? rpar:lit(")") &t_lookahead() { + Expression::Call(make_call(f, lpar, arg.unwrap_or_default(), rpar)) + } + / a:atom() &t_lookahead() {a} + + rule t_lookahead() -> () + = (lit("(") / lit("[") / lit(".")) {} + + // Targets for del statements + + rule del_targets() -> Vec> + = t:separated_trailer(, ) { + comma_separate(t.0, t.1, t.2) + } + + rule del_target() -> DelTargetExpression<'a> + = a:t_primary() d:lit(".") n:name() !t_lookahead() { + DelTargetExpression::Attribute(make_attribute(a, d, n)) + } + / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { + DelTargetExpression::Subscript( + make_subscript(a, lbrak, s, rbrak) + ) + } + / del_t_atom() + + rule del_t_atom() -> DelTargetExpression<'a> + = n:name() { DelTargetExpression::Name(n) } + / l:lpar() d:del_target() r:rpar() { d.with_parens(l, r) } + / l:lpar() d:del_targets()? r:rpar() { + make_del_tuple(Some(l), d.unwrap_or_default(), Some(r)) + } + / l:lbrak() d:del_targets()? r:rbrak() { + DelTargetExpression::List( + make_list(l, d.unwrap_or_default(), r) + ) + } + + // F-strings + + rule fstring() -> FormattedString<'a> + = start:tok(FStringStart, "f\"") + parts:(_f_string() / _f_replacement())* + end:tok(FStringEnd, "\"") { + make_fstring(start.string, parts, end.string) + } + + rule _f_string() -> FormattedStringContent<'a> + = t:tok(FStringString, "f-string contents") { + FormattedStringContent::Text(FormattedStringText { value: t.string }) + } + + rule _f_replacement() -> FormattedStringContent<'a> + = lb:lit("{") e:_f_expr() eq:lit("=")? + conv:(t:lit("!") c:_f_conversion() {(t,c)})? + spec:(t:lit(":") s:_f_spec() {(t,s)})? 
+ rb:lit("}") { + FormattedStringContent::Expression( + make_fstring_expression(lb, e, eq, conv, spec, rb) + ) + } + + rule _f_expr() -> Expression<'a> + = (g:_bare_genexp() {Expression::GeneratorExp(g)}) + / _conditional_expression() + / yield_expr() + + rule _f_conversion() -> &'a str + = lit("r") {"r"} / lit("s") {"s"} / lit("a") {"a"} + + rule _f_spec() -> Vec> + = (_f_string() / _f_replacement())* + + // CST helpers + + rule comma() -> Comma<'a> + = c:lit(",") { make_comma(c) } + + rule dots() -> Vec> + = ds:((dot:lit(".") { make_dot(dot) })+ + / tok:lit("...") { + vec![make_dot(tok.clone()), make_dot(tok.clone()), make_dot(tok.clone())]} + )+ { ds.into_iter().flatten().collect() } + + rule lpar() -> LeftParen<'a> + = a:lit("(") { make_lpar(a) } + + rule rpar() -> RightParen<'a> + = a:lit(")") { make_rpar(a) } + + rule lbrak() -> LeftSquareBracket<'a> + = tok:lit("[") { make_left_bracket(tok) } + + rule rbrak() -> RightSquareBracket<'a> + = tok:lit("]") { make_right_bracket(tok) } + + rule lbrace() -> LeftCurlyBrace<'a> + = tok:lit("{") { make_left_brace(tok) } + + rule rbrace() -> RightCurlyBrace<'a> + = tok:lit("}") { make_right_brace(tok) } + + /// matches any token, not just whitespace + rule _() -> TokenRef<'a> + = [t] { t } + + rule lit(lit: &'static str) -> TokenRef<'a> + = [t] {? if t.string == lit { Ok(t) } else { Err(lit) } } + + rule tok(tok: TokType, err: &'static str) -> TokenRef<'a> + = [t] {? if t.r#type == tok { Ok(t) } else { Err(err) } } + + rule name() -> Name<'a> + = !( lit("False") / lit("None") / lit("True") / lit("and") / lit("as") / lit("assert") / lit("async") / lit("await") + / lit("break") / lit("class") / lit("continue") / lit("def") / lit("del") / lit("elif") / lit("else") + / lit("except") / lit("finally") / lit("for") / lit("from") / lit("global") / lit("if") / lit("import") + / lit("in") / lit("is") / lit("lambda") / lit("nonlocal") / lit("not") / lit("or") / lit("pass") / lit("raise") + / lit("return") / lit("try") / lit("while") / lit("with") / lit("yield") + ) + t:tok(NameTok, "NAME") {make_name(t)} + + rule _async() -> TokenRef<'a> + = tok(Async, "ASYNC") + + rule separated_trailer(el: rule, sep: rule) -> (El, Vec<(Sep, El)>, Option) + = e:el() rest:(s:sep() e:el() {(s, e)})* trailer:sep()? {(e, rest, trailer)} + + rule separated(el: rule, sep: rule) -> (El, Vec<(Sep, El)>) + = e:el() rest:(s:sep() e:el() {(s, e)})* {(e, rest)} + + rule traced(e: rule) -> T = + &(_* { + #[cfg(feature = "trace")] + { + println!("[PEG_INPUT_START]"); + println!("{}", input); + println!("[PEG_TRACE_START]"); + } + }) + e:e()? {? 
+ #[cfg(feature = "trace")] + println!("[PEG_TRACE_STOP]"); + e.ok_or("") + } + + } +} + +#[allow(clippy::too_many_arguments)] +fn make_function_def<'a>( + async_tok: Option>, + def_tok: TokenRef<'a>, + name: Name<'a>, + open_paren_tok: TokenRef<'a>, + params: Option>, + close_paren_tok: TokenRef<'a>, + returns: Option>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> FunctionDef<'a> { + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); + FunctionDef { + name, + params: params.unwrap_or_default(), + body, + decorators: Default::default(), + returns, + asynchronous, + leading_lines: Default::default(), + lines_after_decorators: vec![], + whitespace_after_def: Default::default(), + whitespace_after_name: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_before_params: Default::default(), + async_tok, + def_tok, + open_paren_tok, + close_paren_tok, + colon_tok, + } +} + +fn make_decorator<'a>( + at_tok: TokenRef<'a>, + name: Expression<'a>, + newline_tok: TokenRef<'a>, +) -> Decorator<'a> { + Decorator { + decorator: name, + leading_lines: Default::default(), + whitespace_after_at: Default::default(), + trailing_whitespace: Default::default(), + newline_tok, + at_tok, + } +} + +fn make_comparison<'a>( + head: Expression<'a>, + tail: Vec<(CompOp<'a>, Expression<'a>)>, +) -> Expression<'a> { + let mut comparisons = vec![]; + for (operator, e) in tail { + comparisons.push(ComparisonTarget { + operator, + comparator: e, + }); + } + Expression::Comparison(Comparison { + left: Box::new(head), + comparisons, + lpar: vec![], + rpar: vec![], + }) +} + +fn make_comparison_operator(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + match tok.string { + "<" => Ok(CompOp::LessThan { + whitespace_after, + whitespace_before, + tok, + }), + ">" => Ok(CompOp::GreaterThan { + whitespace_after, + whitespace_before, + tok, + }), + "<=" => Ok(CompOp::LessThanEqual { + whitespace_after, + whitespace_before, + tok, + }), + ">=" => Ok(CompOp::GreaterThanEqual { + whitespace_after, + whitespace_before, + tok, + }), + "==" => Ok(CompOp::Equal { + whitespace_after, + whitespace_before, + tok, + }), + "!=" => Ok(CompOp::NotEqual { + whitespace_after, + whitespace_before, + tok, + }), + "in" => Ok(CompOp::In { + whitespace_after, + whitespace_before, + tok, + }), + "is" => Ok(CompOp::Is { + whitespace_after, + whitespace_before, + tok, + }), + _ => Err(ParserError::OperatorError), + } +} + +fn make_comparison_operator_2<'a>( + first: TokenRef<'a>, + second: TokenRef<'a>, +) -> Result<'a, CompOp<'a>> { + let whitespace_before = Default::default(); + let whitespace_between = Default::default(); + let whitespace_after = Default::default(); + + match (first.string, second.string) { + ("is", "not") => Ok(CompOp::IsNot { + whitespace_before, + whitespace_between, + whitespace_after, + is_tok: first, + not_tok: second, + }), + ("not", "in") => Ok(CompOp::NotIn { + whitespace_before, + whitespace_between, + whitespace_after, + not_tok: first, + in_tok: second, + }), + _ => Err(ParserError::OperatorError), + } +} + +fn make_boolean_op<'a>( + head: Expression<'a>, + tail: Vec<(TokenRef<'a>, Expression<'a>)>, +) -> Result<'a, Expression<'a>> { + if tail.is_empty() { + return Ok(head); + } + + let mut expr = head; + for (tok, right) in tail { + expr = Expression::BooleanOperation(BooleanOperation { + left: Box::new(expr), + operator: make_boolean_operator(tok)?, + right: 
Box::new(right), + lpar: vec![], + rpar: vec![], + }) + } + Ok(expr) +} + +fn make_boolean_operator(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + match tok.string { + "and" => Ok(BooleanOp::And { + whitespace_after, + whitespace_before, + tok, + }), + "or" => Ok(BooleanOp::Or { + whitespace_after, + whitespace_before, + tok, + }), + _ => Err(ParserError::OperatorError), + } +} + +fn make_binary_op<'a>( + left: Expression<'a>, + op: TokenRef<'a>, + right: Expression<'a>, +) -> Result<'a, Expression<'a>> { + let operator = make_binary_operator(op)?; + Ok(Expression::BinaryOperation(BinaryOperation { + left: Box::new(left), + operator, + right: Box::new(right), + lpar: vec![], + rpar: vec![], + })) +} + +fn make_binary_operator(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + + match tok.string { + "+" => Ok(BinaryOp::Add { + whitespace_after, + whitespace_before, + tok, + }), + "-" => Ok(BinaryOp::Subtract { + whitespace_after, + whitespace_before, + tok, + }), + "*" => Ok(BinaryOp::Multiply { + whitespace_after, + whitespace_before, + tok, + }), + "/" => Ok(BinaryOp::Divide { + whitespace_after, + whitespace_before, + tok, + }), + "//" => Ok(BinaryOp::FloorDivide { + whitespace_after, + whitespace_before, + tok, + }), + "%" => Ok(BinaryOp::Modulo { + whitespace_after, + whitespace_before, + tok, + }), + "**" => Ok(BinaryOp::Power { + whitespace_after, + whitespace_before, + tok, + }), + "<<" => Ok(BinaryOp::LeftShift { + whitespace_after, + whitespace_before, + tok, + }), + ">>" => Ok(BinaryOp::RightShift { + whitespace_after, + whitespace_before, + tok, + }), + "|" => Ok(BinaryOp::BitOr { + whitespace_after, + whitespace_before, + tok, + }), + "&" => Ok(BinaryOp::BitAnd { + whitespace_after, + whitespace_before, + tok, + }), + "^" => Ok(BinaryOp::BitXor { + whitespace_after, + whitespace_before, + tok, + }), + "@" => Ok(BinaryOp::MatrixMultiply { + whitespace_after, + whitespace_before, + tok, + }), + _ => Err(ParserError::OperatorError), + } +} + +fn make_unary_op<'a>(op: TokenRef<'a>, tail: Expression<'a>) -> Result<'a, Expression<'a>> { + let operator = make_unary_operator(op)?; + Ok(Expression::UnaryOperation(UnaryOperation { + operator, + expression: Box::new(tail), + lpar: vec![], + rpar: vec![], + })) +} + +fn make_unary_operator(tok: TokenRef) -> Result { + let whitespace_after = Default::default(); + match tok.string { + "+" => Ok(UnaryOp::Plus { + whitespace_after, + tok, + }), + "-" => Ok(UnaryOp::Minus { + whitespace_after, + tok, + }), + "~" => Ok(UnaryOp::BitInvert { + whitespace_after, + tok, + }), + "not" => Ok(UnaryOp::Not { + whitespace_after, + tok, + }), + _ => Err(ParserError::OperatorError), + } +} + +fn make_number(num: TokenRef) -> Expression { + super::numbers::parse_number(num.string) +} + +fn make_indented_block<'a>( + nl: TokenRef<'a>, + indent: TokenRef<'a>, + statements: Vec>, + dedent: TokenRef<'a>, +) -> Suite<'a> { + Suite::IndentedBlock(IndentedBlock { + body: statements, + header: Default::default(), + indent: Default::default(), + footer: Default::default(), + newline_tok: nl, + indent_tok: indent, + dedent_tok: dedent, + }) +} + +struct SimpleStatementParts<'a> { + first_tok: TokenRef<'a>, // The first token of the first statement. 
Used for its whitespace + first_statement: SmallStatement<'a>, + rest: Vec<(TokenRef<'a>, SmallStatement<'a>)>, // semicolon, statement pairs + last_semi: Option>, + nl: TokenRef<'a>, +} + +fn make_semicolon(tok: TokenRef) -> Semicolon { + Semicolon { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } +} + +fn _make_simple_statement( + parts: SimpleStatementParts, +) -> (TokenRef, Vec, TokenRef) { + let mut body = vec![]; + + let mut current = parts.first_statement; + for (semi, next) in parts.rest { + body.push(current.with_semicolon(Some(make_semicolon(semi)))); + current = next; + } + if let Some(semi) = parts.last_semi { + current = current.with_semicolon(Some(make_semicolon(semi))); + } + body.push(current); + + (parts.first_tok, body, parts.nl) +} + +fn make_simple_statement_suite(parts: SimpleStatementParts) -> Suite { + let (first_tok, body, newline_tok) = _make_simple_statement(parts); + + Suite::SimpleStatementSuite(SimpleStatementSuite { + body, + leading_whitespace: Default::default(), + trailing_whitespace: Default::default(), + first_tok, + newline_tok, + }) +} + +fn make_simple_statement_line(parts: SimpleStatementParts) -> SimpleStatementLine { + let (first_tok, body, newline_tok) = _make_simple_statement(parts); + SimpleStatementLine { + body, + leading_lines: Default::default(), + trailing_whitespace: Default::default(), + first_tok, + newline_tok, + } +} + +fn make_if<'a>( + if_tok: TokenRef<'a>, + cond: Expression<'a>, + colon_tok: TokenRef<'a>, + block: Suite<'a>, + orelse: Option>, + is_elif: bool, +) -> If<'a> { + If { + leading_lines: Default::default(), + whitespace_before_test: Default::default(), + test: cond, + whitespace_after_test: Default::default(), + body: block, + orelse: orelse.map(Box::new), + is_elif, + if_tok, + colon_tok, + } +} + +fn make_else<'a>(else_tok: TokenRef<'a>, colon_tok: TokenRef<'a>, block: Suite<'a>) -> Else<'a> { + Else { + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), + body: block, + else_tok, + colon_tok, + } +} + +struct StarEtc<'a>(Option>, Vec>, Option>); + +fn make_parameters<'a>( + posonly: Option<(Vec>, ParamSlash<'a>)>, + params: Vec>, + star_etc: Option>, +) -> Parameters<'a> { + let (posonly_params, posonly_ind) = match posonly { + Some((a, b)) => (a, Some(b)), + None => (vec![], None), + }; + let (star_arg, kwonly_params, star_kwarg) = match star_etc { + None => (None, vec![], None), + Some(StarEtc(a, b, c)) => (a, b, c), + }; + Parameters { + params, + star_arg, + kwonly_params, + star_kwarg, + posonly_params, + posonly_ind, + } +} + +fn add_param_default<'a>( + param: Param<'a>, + def: Option<(AssignEqual<'a>, Expression<'a>)>, + comma_tok: Option>, +) -> Param<'a> { + let comma = comma_tok.map(make_comma); + + let (equal, default) = match def { + Some((a, b)) => (Some(a), Some(b)), + None => (None, None), + }; + Param { + equal, + default, + comma, + ..param + } +} + +fn add_param_star<'a>(param: Param<'a>, star: TokenRef<'a>) -> Param<'a> { + let str = star.string; + Param { + star: Some(str), + star_tok: Some(star), + ..param + } +} + +fn make_assign_equal(tok: TokenRef) -> AssignEqual { + AssignEqual { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } +} + +fn make_comma(tok: TokenRef) -> Comma { + Comma { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } +} + +fn concat(a: Vec, b: Vec) -> Vec { + a.into_iter().chain(b.into_iter()).collect() +} + +fn 
make_name_or_attr<'a>( + first_tok: Name<'a>, + mut tail: Vec<(TokenRef<'a>, Name<'a>)>, +) -> NameOrAttribute<'a> { + if let Some((dot, name)) = tail.pop() { + let dot = make_dot(dot); + return NameOrAttribute::A(Attribute { + attr: name, + dot, + lpar: Default::default(), + rpar: Default::default(), + value: Box::new(make_name_or_attr(first_tok, tail).into()), + }); + } else { + NameOrAttribute::N(first_tok) + } +} + +fn make_name(tok: TokenRef) -> Name { + Name { + value: tok.string, + ..Default::default() + } +} + +fn make_dot(tok: TokenRef) -> Dot { + Dot { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } +} + +fn make_import_alias<'a>( + name: NameOrAttribute<'a>, + asname: Option<(TokenRef<'a>, Name<'a>)>, +) -> ImportAlias<'a> { + ImportAlias { + name, + asname: asname.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))), + comma: None, + } +} + +fn make_as_name<'a>(as_tok: TokenRef<'a>, name: AssignTargetExpression<'a>) -> AsName<'a> { + AsName { + name, + whitespace_before_as: Default::default(), + whitespace_after_as: Default::default(), + as_tok, + } +} + +type ParenthesizedImportNames<'a> = ( + Option>, + ImportNames<'a>, + Option>, +); + +fn make_import_from<'a>( + from_tok: TokenRef<'a>, + dots: Vec>, + module: Option>, + import_tok: TokenRef<'a>, + aliases: ParenthesizedImportNames<'a>, +) -> ImportFrom<'a> { + let (lpar, names, rpar) = aliases; + + ImportFrom { + module, + names, + relative: dots, + lpar, + rpar, + semicolon: None, + whitespace_after_from: Default::default(), + whitespace_after_import: Default::default(), + whitespace_before_import: Default::default(), + from_tok, + import_tok, + } +} + +fn make_import<'a>(import_tok: TokenRef<'a>, names: Vec>) -> Import<'a> { + Import { + names, + whitespace_after_import: Default::default(), + semicolon: None, + import_tok, + } +} + +fn make_import_from_as_names<'a>( + first: ImportAlias<'a>, + tail: Vec<(Comma<'a>, ImportAlias<'a>)>, +) -> Vec> { + let mut ret = vec![]; + let mut cur = first; + for (comma, alias) in tail { + ret.push(cur.with_comma(comma)); + cur = alias; + } + ret.push(cur); + ret +} + +fn make_lpar(tok: TokenRef) -> LeftParen { + LeftParen { + whitespace_after: Default::default(), + lpar_tok: tok, + } +} + +fn make_rpar(tok: TokenRef) -> RightParen { + RightParen { + whitespace_before: Default::default(), + rpar_tok: tok, + } +} + +fn make_module<'a>(body: Vec>, tok: TokenRef<'a>, encoding: &str) -> Module<'a> { + Module { + body, + header: Default::default(), + footer: Default::default(), + eof_tok: tok, + default_indent: " ", + default_newline: "\n", + has_trailing_newline: false, + encoding: encoding.to_string(), + } +} + +fn make_attribute<'a>(value: Expression<'a>, dot: TokenRef<'a>, attr: Name<'a>) -> Attribute<'a> { + let dot = make_dot(dot); + Attribute { + attr, + dot, + lpar: Default::default(), + rpar: Default::default(), + value: Box::new(value), + } +} + +fn make_starred_element<'a>(star_tok: TokenRef<'a>, rest: Element<'a>) -> StarredElement<'a> { + let value = match rest { + Element::Simple { value, .. 
} => value, + _ => panic!("Internal error while making starred element"), + }; + StarredElement { + value: Box::new(value), + whitespace_before_value: Default::default(), + lpar: Default::default(), + rpar: Default::default(), + comma: Default::default(), + star_tok, + } +} + +fn assign_target_to_element(expr: AssignTargetExpression) -> Element { + match expr { + AssignTargetExpression::Attribute(a) => Element::Simple { + value: Expression::Attribute(a), + comma: Default::default(), + }, + AssignTargetExpression::Name(a) => Element::Simple { + value: Expression::Name(a), + comma: Default::default(), + }, + AssignTargetExpression::Tuple(a) => Element::Simple { + value: Expression::Tuple(a), + comma: Default::default(), + }, + AssignTargetExpression::StarredElement(s) => Element::Starred(s), + AssignTargetExpression::List(l) => Element::Simple { + value: Expression::List(l), + comma: Default::default(), + }, + AssignTargetExpression::Subscript(s) => Element::Simple { + value: Expression::Subscript(s), + comma: Default::default(), + }, + } +} + +fn make_assignment<'a>( + lhs: Vec<(AssignTargetExpression<'a>, TokenRef<'a>)>, + rhs: Expression<'a>, +) -> Assign<'a> { + let mut targets = vec![]; + for (target, equal_tok) in lhs { + targets.push(AssignTarget { + target, + whitespace_before_equal: Default::default(), + whitespace_after_equal: Default::default(), + equal_tok, + }); + } + Assign { + targets, + value: rhs, + semicolon: Default::default(), + } +} + +fn expr_to_element(expr: Expression) -> Element { + Element::Simple { + value: expr, + comma: Default::default(), + } +} + +fn make_tuple<'a>( + first: Element<'a>, + rest: Vec<(Comma<'a>, Element<'a>)>, + trailing_comma: Option>, + lpar: Option>, + rpar: Option>, +) -> Tuple<'a> { + let elements = comma_separate(first, rest, trailing_comma); + + let lpar = lpar.map(|l| vec![l]).unwrap_or_default(); + let rpar = rpar.map(|r| vec![r]).unwrap_or_default(); + + Tuple { + elements, + lpar, + rpar, + } +} + +fn make_kwarg<'a>(name: Name<'a>, eq: TokenRef<'a>, value: Expression<'a>) -> Arg<'a> { + let equal = Some(make_assign_equal(eq)); + let keyword = Some(name); + Arg { + value, + keyword, + equal, + comma: None, + star: "", + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), + star_tok: None, + } +} + +fn make_star_arg<'a>(star: TokenRef<'a>, expr: Expression<'a>) -> Arg<'a> { + let str = star.string; + Arg { + value: expr, + keyword: None, + equal: None, + comma: None, + star: str, + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), + star_tok: Some(star), + } +} + +fn make_call<'a>( + func: Expression<'a>, + lpar_tok: TokenRef<'a>, + args: Vec>, + rpar_tok: TokenRef<'a>, +) -> Call<'a> { + let lpar = vec![]; + let rpar = vec![]; + let func = Box::new(func); + + Call { + func, + args, + lpar, + rpar, + whitespace_after_func: Default::default(), + whitespace_before_args: Default::default(), + lpar_tok, + rpar_tok, + } +} + +fn make_genexp_call<'a>(func: Expression<'a>, mut genexp: GeneratorExp<'a>) -> Call<'a> { + // func ( (genexp) ) + // ^ + // lpar_tok + + // lpar_tok is the same token that was used to parse genexp's first lpar. + // Nothing owns the whitespace before lpar_tok, so the same token is passed in here + // again, to be converted into whitespace_after_func. We then split off a pair of + // parenthesis from genexp, since now Call will own them. 
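+    //
+    // Illustration (not part of the original comment): for `f(x for x in y)` the
+    // generator expression is first parsed with the call's `(`/`)` as its own
+    // lpar/rpar; the code below moves that outermost pair onto the Call's
+    // lpar_tok/rpar_tok, so the GeneratorExp argument no longer carries it.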
+ + let mut lpars = genexp.lpar.into_iter(); + let lpar_tok = lpars.next().expect("genexp without lpar").lpar_tok; + genexp.lpar = lpars.collect(); + let rpar_tok = genexp.rpar.pop().expect("genexp without rpar").rpar_tok; + + Call { + func: Box::new(func), + args: vec![Arg { + value: Expression::GeneratorExp(genexp), + keyword: None, + equal: None, + comma: None, + star: "", + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), + star_tok: None, + }], + lpar: vec![], + rpar: vec![], + whitespace_after_func: Default::default(), + whitespace_before_args: Default::default(), + lpar_tok, + rpar_tok, + } +} + +fn make_arg(expr: Expression) -> Arg { + Arg { + value: expr, + keyword: Default::default(), + equal: Default::default(), + comma: Default::default(), + star: Default::default(), + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), + star_tok: None, + } +} + +fn make_comp_if<'a>(if_tok: TokenRef<'a>, test: Expression<'a>) -> CompIf<'a> { + CompIf { + test, + whitespace_before: Default::default(), + whitespace_before_test: Default::default(), + if_tok, + } +} + +fn make_for_if<'a>( + async_tok: Option>, + for_tok: TokenRef<'a>, + target: AssignTargetExpression<'a>, + in_tok: TokenRef<'a>, + iter: Expression<'a>, + ifs: Vec>, +) -> CompFor<'a> { + let inner_for_in = None; + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); + + CompFor { + target, + iter, + ifs, + inner_for_in, + asynchronous, + whitespace_before: Default::default(), + whitespace_after_for: Default::default(), + whitespace_before_in: Default::default(), + whitespace_after_in: Default::default(), + async_tok, + for_tok, + in_tok, + } +} + +fn make_bare_genexp<'a>(elt: Expression<'a>, for_in: CompFor<'a>) -> GeneratorExp<'a> { + GeneratorExp { + elt: Box::new(elt), + for_in: Box::new(for_in), + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn merge_comp_fors(comp_fors: Vec) -> CompFor { + let mut it = comp_fors.into_iter().rev(); + let first = it.next().expect("cant merge empty comp_fors"); + + it.fold(first, |acc, curr| CompFor { + inner_for_in: Some(Box::new(acc)), + ..curr + }) +} + +fn make_left_bracket(tok: TokenRef) -> LeftSquareBracket { + LeftSquareBracket { + whitespace_after: Default::default(), + tok, + } +} + +fn make_right_bracket(tok: TokenRef) -> RightSquareBracket { + RightSquareBracket { + whitespace_before: Default::default(), + tok, + } +} + +fn make_left_brace(tok: TokenRef) -> LeftCurlyBrace { + LeftCurlyBrace { + whitespace_after: Default::default(), + tok, + } +} + +fn make_right_brace(tok: TokenRef) -> RightCurlyBrace { + RightCurlyBrace { + whitespace_before: Default::default(), + tok, + } +} + +fn make_list_comp<'a>( + lbracket: LeftSquareBracket<'a>, + elt: Expression<'a>, + for_in: CompFor<'a>, + rbracket: RightSquareBracket<'a>, +) -> ListComp<'a> { + ListComp { + elt: Box::new(elt), + for_in: Box::new(for_in), + lbracket, + rbracket, + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn make_set_comp<'a>( + lbrace: LeftCurlyBrace<'a>, + elt: Expression<'a>, + for_in: CompFor<'a>, + rbrace: RightCurlyBrace<'a>, +) -> SetComp<'a> { + SetComp { + elt: Box::new(elt), + for_in: Box::new(for_in), + lbrace, + rbrace, + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn make_dict_comp<'a>( + lbrace: LeftCurlyBrace<'a>, + kvpair: (Expression<'a>, TokenRef<'a>, Expression<'a>), + for_in: CompFor<'a>, + rbrace: RightCurlyBrace<'a>, 
+) -> DictComp<'a> { + let (key, colon_tok, value) = kvpair; + + DictComp { + key: Box::new(key), + value: Box::new(value), + for_in: Box::new(for_in), + lbrace, + rbrace, + lpar: vec![], + rpar: vec![], + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), + colon_tok, + } +} + +fn make_list<'a>( + lbracket: LeftSquareBracket<'a>, + elements: Vec>, + rbracket: RightSquareBracket<'a>, +) -> List<'a> { + List { + elements, + lbracket, + rbracket, + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn make_set<'a>( + lbrace: LeftCurlyBrace<'a>, + elements: Vec>, + rbrace: RightCurlyBrace<'a>, +) -> Set<'a> { + Set { + elements, + lbrace, + rbrace, + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn comma_separate<'a, T>( + first: T, + rest: Vec<(Comma<'a>, T)>, + trailing_comma: Option>, +) -> Vec +where + T: WithComma<'a>, +{ + let mut elements = vec![]; + let mut current = first; + for (comma, next) in rest { + elements.push(current.with_comma(comma)); + current = next; + } + if let Some(comma) = trailing_comma { + current = current.with_comma(comma); + } + elements.push(current); + elements +} + +fn make_dict<'a>( + lbrace: LeftCurlyBrace<'a>, + elements: Vec>, + rbrace: RightCurlyBrace<'a>, +) -> Dict<'a> { + Dict { + elements, + lbrace, + rbrace, + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn make_double_starred_keypairs<'a>( + first: DictElement<'a>, + rest: Vec<(Comma<'a>, DictElement<'a>)>, + trailing_comma: Option>, +) -> Vec> { + let mut elements = vec![]; + let mut current = first; + for (comma, next) in rest { + elements.push(current.with_comma(comma)); + current = next; + } + if let Some(mut comma) = trailing_comma { + // don't consume trailing whitespace for trailing comma + comma.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace(SimpleWhitespace("")); + current = current.with_comma(comma); + } + elements.push(current); + elements +} + +fn make_dict_element<'a>(el: (Expression<'a>, TokenRef<'a>, Expression<'a>)) -> DictElement<'a> { + let (key, colon_tok, value) = el; + DictElement::Simple { + key, + value, + comma: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), + colon_tok, + } +} + +fn make_double_starred_element<'a>( + star_tok: TokenRef<'a>, + value: Expression<'a>, +) -> StarredDictElement<'a> { + StarredDictElement { + value, + comma: Default::default(), + whitespace_before_value: Default::default(), + star_tok, + } +} + +fn make_index(value: Expression) -> BaseSlice { + BaseSlice::Index(Index { value }) +} + +fn make_colon(tok: TokenRef) -> Colon { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + Colon { + whitespace_before, + whitespace_after, + tok, + } +} + +fn make_slice<'a>( + lower: Option>, + first_colon: TokenRef<'a>, + upper: Option>, + rest: Option<(TokenRef<'a>, Option>)>, +) -> BaseSlice<'a> { + let first_colon = make_colon(first_colon); + let (second_colon, step) = if let Some((tok, step)) = rest { + (Some(make_colon(tok)), step) + } else { + (None, None) + }; + BaseSlice::Slice(Slice { + lower, + upper, + step, + first_colon, + second_colon, + }) +} + +fn make_slices<'a>( + first: BaseSlice<'a>, + rest: Vec<(Comma<'a>, BaseSlice<'a>)>, + trailing_comma: Option>, +) -> Vec> { + let mut elements = vec![]; + let mut current = first; + for (comma, next) in rest { + elements.push(SubscriptElement { + slice: current, + comma: Some(comma), + }); + 
current = next; + } + elements.push(SubscriptElement { + slice: current, + comma: trailing_comma, + }); + elements +} + +fn make_subscript<'a>( + value: Expression<'a>, + lbracket: LeftSquareBracket<'a>, + slice: Vec>, + rbracket: RightSquareBracket<'a>, +) -> Subscript<'a> { + let lbracket_tok = lbracket.tok.clone(); + Subscript { + value: Box::new(value), + slice, + lbracket, + rbracket, + lpar: Default::default(), + rpar: Default::default(), + whitespace_after_value: Default::default(), + lbracket_tok, + } +} + +fn make_ifexp<'a>( + body: Expression<'a>, + if_tok: TokenRef<'a>, + test: Expression<'a>, + else_tok: TokenRef<'a>, + orelse: Expression<'a>, +) -> IfExp<'a> { + IfExp { + test: Box::new(test), + body: Box::new(body), + orelse: Box::new(orelse), + lpar: Default::default(), + rpar: Default::default(), + whitespace_before_if: Default::default(), + whitespace_after_if: Default::default(), + whitespace_before_else: Default::default(), + whitespace_after_else: Default::default(), + if_tok, + else_tok, + } +} + +fn add_arguments_trailing_comma<'a>( + mut args: Vec>, + trailing_comma: Option>, +) -> Vec> { + if let Some(comma) = trailing_comma { + let last = args.pop().unwrap(); + args.push(last.with_comma(comma)); + } + args +} + +fn make_lambda<'a>( + lambda_tok: TokenRef<'a>, + params: Parameters<'a>, + colon_tok: TokenRef<'a>, + expr: Expression<'a>, +) -> Lambda<'a> { + let colon = make_colon(colon_tok); + Lambda { + params: Box::new(params), + body: Box::new(expr), + colon, + lpar: Default::default(), + rpar: Default::default(), + whitespace_after_lambda: Default::default(), + lambda_tok, + } +} + +fn make_annotation<'a>(tok: TokenRef<'a>, ann: Expression<'a>) -> Annotation<'a> { + Annotation { + annotation: ann, + whitespace_before_indicator: Default::default(), + whitespace_after_indicator: Default::default(), + tok, + } +} + +fn make_ann_assignment<'a>( + target: AssignTargetExpression<'a>, + col: TokenRef<'a>, + ann: Expression<'a>, + rhs: Option<(TokenRef<'a>, Expression<'a>)>, +) -> AnnAssign<'a> { + let annotation = make_annotation(col, ann); + let (eq, value) = rhs.map(|(x, y)| (Some(x), Some(y))).unwrap_or((None, None)); + let equal = eq.map(make_assign_equal); + AnnAssign { + target, + annotation, + value, + equal, + semicolon: None, + } +} + +fn make_yield<'a>( + yield_tok: TokenRef<'a>, + f: Option>, + e: Option>, +) -> Yield<'a> { + let value = match (f, e) { + (None, None) => None, + (Some(f), Some(e)) => Some(YieldValue::From(make_from(f, e))), + (None, Some(e)) => Some(YieldValue::Expression(e)), + _ => panic!("yield from without expression"), + }; + Yield { + value: value.map(Box::new), + lpar: Default::default(), + rpar: Default::default(), + whitespace_after_yield: Default::default(), + yield_tok, + } +} + +fn make_from<'a>(tok: TokenRef<'a>, e: Expression<'a>) -> From<'a> { + From { + item: e, + whitespace_before_from: Default::default(), + whitespace_after_from: Default::default(), + tok, + } +} + +fn make_return<'a>(return_tok: TokenRef<'a>, value: Option>) -> Return<'a> { + Return { + value, + whitespace_after_return: Default::default(), + semicolon: Default::default(), + return_tok, + } +} + +fn make_assert<'a>( + assert_tok: TokenRef<'a>, + test: Expression<'a>, + rest: Option<(Comma<'a>, Expression<'a>)>, +) -> Assert<'a> { + let (comma, msg) = if let Some((c, msg)) = rest { + (Some(c), Some(msg)) + } else { + (None, None) + }; + + Assert { + test, + msg, + comma, + whitespace_after_assert: Default::default(), + semicolon: Default::default(), + 
assert_tok, + } +} + +fn make_raise<'a>( + raise_tok: TokenRef<'a>, + exc: Option>, + rest: Option<(TokenRef<'a>, Expression<'a>)>, +) -> Raise<'a> { + let cause = rest.map(|(t, e)| make_from(t, e)); + + Raise { + exc, + cause, + whitespace_after_raise: Default::default(), + semicolon: Default::default(), + raise_tok, + } +} + +fn make_global<'a>( + tok: TokenRef<'a>, + init: Vec<(Name<'a>, Comma<'a>)>, + last: Name<'a>, +) -> Global<'a> { + let mut names: Vec> = init + .into_iter() + .map(|(name, c)| NameItem { + name, + comma: Some(c), + }) + .collect(); + names.push(NameItem { + name: last, + comma: None, + }); + Global { + names, + whitespace_after_global: Default::default(), + semicolon: Default::default(), + tok, + } +} + +fn make_nonlocal<'a>( + tok: TokenRef<'a>, + init: Vec<(Name<'a>, Comma<'a>)>, + last: Name<'a>, +) -> Nonlocal<'a> { + let mut names: Vec> = init + .into_iter() + .map(|(name, c)| NameItem { + name, + comma: Some(c), + }) + .collect(); + names.push(NameItem { + name: last, + comma: None, + }); + Nonlocal { + names, + whitespace_after_nonlocal: Default::default(), + semicolon: Default::default(), + tok, + } +} + +#[allow(clippy::too_many_arguments)] +fn make_for<'a>( + async_tok: Option>, + for_tok: TokenRef<'a>, + target: AssignTargetExpression<'a>, + in_tok: TokenRef<'a>, + iter: Expression<'a>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, + orelse: Option>, +) -> For<'a> { + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); + + For { + target, + iter, + body, + orelse, + asynchronous, + leading_lines: Default::default(), + whitespace_after_for: Default::default(), + whitespace_before_in: Default::default(), + whitespace_after_in: Default::default(), + whitespace_before_colon: Default::default(), + async_tok, + for_tok, + in_tok, + colon_tok, + } +} + +fn make_while<'a>( + while_tok: TokenRef<'a>, + test: Expression<'a>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, + orelse: Option>, +) -> While<'a> { + While { + test, + body, + orelse, + leading_lines: Default::default(), + whitespace_after_while: Default::default(), + whitespace_before_colon: Default::default(), + while_tok, + colon_tok, + } +} + +fn make_await<'a>(await_tok: TokenRef<'a>, expression: Expression<'a>) -> Await<'a> { + Await { + expression: Box::new(expression), + lpar: Default::default(), + rpar: Default::default(), + whitespace_after_await: Default::default(), + await_tok, + } +} + +fn make_class_def<'a>( + class_tok: TokenRef<'a>, + name: Name<'a>, + args: Option<(LeftParen<'a>, Option>>, RightParen<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> std::result::Result, &'static str> { + let mut bases = vec![]; + let mut keywords = vec![]; + let mut parens_tok = None; + let mut lpar = None; + let mut rpar = None; + + if let Some((lpar_, args, rpar_)) = args { + parens_tok = Some((lpar_.lpar_tok.clone(), rpar_.rpar_tok.clone())); + lpar = Some(lpar_); + rpar = Some(rpar_); + if let Some(args) = args { + let mut current_arg = &mut bases; + let mut seen_keyword = false; + for arg in args { + if arg.star == "**" || arg.keyword.is_some() { + current_arg = &mut keywords; + seen_keyword = true; + } + if seen_keyword + && (arg.star == "*" || (arg.star.is_empty() && arg.keyword.is_none())) + { + return Err("Positional argument follows keyword argument"); + } + // TODO: libcst-python does validation here + current_arg.push(arg); + } + } + } + Ok(ClassDef { + name, + body, + bases, + keywords, + decorators: vec![], + lpar, + rpar, + 
leading_lines: Default::default(), + lines_after_decorators: Default::default(), + whitespace_after_class: Default::default(), + whitespace_after_name: Default::default(), + whitespace_before_colon: Default::default(), + class_tok, + parens_tok, + colon_tok, + }) +} + +fn make_string(tok: TokenRef) -> String { + String::Simple(SimpleString { + value: tok.string, + ..Default::default() + }) +} + +fn make_strings<'a>(s: Vec<(String<'a>, TokenRef<'a>)>) -> String<'a> { + let mut strings = s.into_iter().rev(); + let (first, _) = strings.next().expect("no strings to make a string of"); + strings.fold(first, |acc, (str, tok)| { + let ret: String<'a> = String::Concatenated(ConcatenatedString { + left: Box::new(str), + right: Box::new(acc), + whitespace_between: Default::default(), + lpar: Default::default(), + rpar: Default::default(), + right_tok: tok, + }); + ret + }) +} + +fn make_fstring_expression<'a>( + lbrace_tok: TokenRef<'a>, + expression: Expression<'a>, + eq: Option>, + conversion_pair: Option<(TokenRef<'a>, &'a str)>, + format_pair: Option<(TokenRef<'a>, Vec>)>, + rbrace_tok: TokenRef<'a>, +) -> FormattedStringExpression<'a> { + let equal = eq.map(make_assign_equal); + let (conversion_tok, conversion) = if let Some((t, c)) = conversion_pair { + (Some(t), Some(c)) + } else { + (None, None) + }; + let (format_tok, format_spec) = if let Some((t, f)) = format_pair { + (Some(t), Some(f)) + } else { + (None, None) + }; + let after_expr_tok = if equal.is_some() { + None + } else if let Some(tok) = conversion_tok { + Some(tok) + } else if let Some(tok) = format_tok { + Some(tok) + } else { + Some(rbrace_tok) + }; + + FormattedStringExpression { + expression, + conversion, + format_spec, + whitespace_before_expression: Default::default(), + whitespace_after_expression: Default::default(), + equal, + lbrace_tok, + after_expr_tok, + } +} + +fn make_fstring<'a>( + start: &'a str, + parts: Vec>, + end: &'a str, +) -> FormattedString<'a> { + FormattedString { + start, + parts, + end, + lpar: Default::default(), + rpar: Default::default(), + } +} + +fn make_finally<'a>( + finally_tok: TokenRef<'a>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> Finally<'a> { + Finally { + body, + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), + finally_tok, + colon_tok, + } +} + +fn make_except<'a>( + except_tok: TokenRef<'a>, + exp: Option>, + as_: Option<(TokenRef<'a>, Name<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> ExceptHandler<'a> { + // TODO: AsName should come from outside + let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))); + ExceptHandler { + body, + r#type: exp, + name, + leading_lines: Default::default(), + whitespace_after_except: Default::default(), + whitespace_before_colon: Default::default(), + except_tok, + colon_tok, + } +} + +fn make_try<'a>( + try_tok: TokenRef<'a>, + body: Suite<'a>, + handlers: Vec>, + orelse: Option>, + finalbody: Option>, +) -> Try<'a> { + Try { + body, + handlers, + orelse, + finalbody, + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), + try_tok, + } +} + +fn make_aug_op(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + + Ok(match tok.string { + "+=" => AugOp::AddAssign { + whitespace_before, + whitespace_after, + tok, + }, + "-=" => AugOp::SubtractAssign { + whitespace_before, + whitespace_after, + tok, + }, + "*=" => AugOp::MultiplyAssign { + whitespace_before, + whitespace_after, + tok, 
+ }, + "@=" => AugOp::MatrixMultiplyAssign { + whitespace_before, + whitespace_after, + tok, + }, + "/=" => AugOp::DivideAssign { + whitespace_before, + whitespace_after, + tok, + }, + "%=" => AugOp::ModuloAssign { + whitespace_before, + whitespace_after, + tok, + }, + "&=" => AugOp::BitAndAssign { + whitespace_before, + whitespace_after, + tok, + }, + "|=" => AugOp::BitOrAssign { + whitespace_before, + whitespace_after, + tok, + }, + "^=" => AugOp::BitXorAssign { + whitespace_before, + whitespace_after, + tok, + }, + "<<=" => AugOp::LeftShiftAssign { + whitespace_before, + whitespace_after, + tok, + }, + ">>=" => AugOp::RightShiftAssign { + whitespace_before, + whitespace_after, + tok, + }, + "**=" => AugOp::PowerAssign { + whitespace_before, + whitespace_after, + tok, + }, + "//=" => AugOp::FloorDivideAssign { + whitespace_before, + whitespace_after, + tok, + }, + _ => return Err(ParserError::OperatorError), + }) +} + +fn make_aug_assign<'a>( + target: AssignTargetExpression<'a>, + operator: AugOp<'a>, + value: Expression<'a>, +) -> AugAssign<'a> { + AugAssign { + target, + operator, + value, + semicolon: Default::default(), + } +} + +fn make_with_item<'a>( + item: Expression<'a>, + as_: Option>, + n: Option>, +) -> WithItem<'a> { + let asname = match (as_, n) { + (Some(as_), Some(n)) => Some(make_as_name(as_, n)), + (None, None) => None, + _ => panic!("as and name should be present or missing together"), + }; + WithItem { + item, + asname, + comma: Default::default(), + } +} + +fn make_with<'a>( + async_tok: Option>, + with_tok: TokenRef<'a>, + items: Vec>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> With<'a> { + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); + With { + items, + body, + asynchronous, + leading_lines: Default::default(), + whitespace_after_with: Default::default(), + whitespace_before_colon: Default::default(), + async_tok, + with_tok, + colon_tok, + } +} + +fn make_del<'a>(tok: TokenRef<'a>, target: DelTargetExpression<'a>) -> Del<'a> { + Del { + target, + whitespace_after_del: Default::default(), + semicolon: Default::default(), + tok, + } +} + +fn make_del_tuple<'a>( + lpar: Option>, + elements: Vec>, + rpar: Option>, +) -> DelTargetExpression<'a> { + DelTargetExpression::Tuple(Tuple { + elements, + lpar: lpar.map(|x| vec![x]).unwrap_or_default(), + rpar: rpar.map(|x| vec![x]).unwrap_or_default(), + }) +} + +fn make_named_expr<'a>(name: Name<'a>, tok: TokenRef<'a>, expr: Expression<'a>) -> NamedExpr<'a> { + NamedExpr { + target: Box::new(Expression::Name(name)), + value: Box::new(expr), + lpar: Default::default(), + rpar: Default::default(), + whitespace_before_walrus: Default::default(), + whitespace_after_walrus: Default::default(), + walrus_tok: tok, + } +} diff --git a/native/libcst/src/parser/mod.rs b/native/libcst/src/parser/mod.rs new file mode 100644 index 00000000..76094afb --- /dev/null +++ b/native/libcst/src/parser/mod.rs @@ -0,0 +1,6 @@ +mod errors; +mod grammar; +mod numbers; + +pub use errors::ParserError; +pub use grammar::{python, Result}; diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs new file mode 100644 index 00000000..abe23065 --- /dev/null +++ b/native/libcst/src/parser/numbers.rs @@ -0,0 +1,68 @@ +use once_cell::sync::Lazy; +use regex::Regex; + +use crate::{Expression, Float, Imaginary, Integer}; + +static HEX: &str = r"0[xX](?:_?[0-9a-fA-F])+"; +static BIN: &str = r"0[bB](?:_?[01])+"; +static OCT: &str = r"0[oO](?:_?[0-7])+"; +static 
DECIMAL: &str = r"(?:0(?:_?0)*|[1-9](?:_?[0-9])*)"; + +static INTEGER_RE: Lazy = Lazy::new(|| { + Regex::new(format!("^({}|{}|{}|{})$", HEX, BIN, OCT, DECIMAL).as_str()).expect("regex") +}); + +static EXPONENT: &str = r"[eE][-+]?[0-9](?:_?[0-9])*"; +// Note: these don't exactly match the python implementation (exponent is not included) +static POINT_FLOAT: &str = r"([0-9](?:_?[0-9])*\.(?:[0-9](?:_?[0-9])*)?|\.[0-9](?:_?[0-9])*)"; +static EXP_FLOAT: &str = r"[0-9](?:_?[0-9])*"; + +static FLOAT_RE: Lazy = Lazy::new(|| { + Regex::new( + format!( + "^({}({})?|{}{})$", + POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT + ) + .as_str(), + ) + .expect("regex") +}); + +static IMAGINARY_RE: Lazy = Lazy::new(|| { + Regex::new( + format!( + r"^([0-9](?:_?[0-9])*[jJ]|({}({})?|{}{})[jJ])$", + POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT + ) + .as_str(), + ) + .expect("regex") +}); + +pub(crate) fn parse_number(raw: &str) -> Expression { + if INTEGER_RE.is_match(raw) { + Expression::Integer(Integer { + value: raw, + lpar: Default::default(), + rpar: Default::default(), + }) + } else if FLOAT_RE.is_match(raw) { + Expression::Float(Float { + value: raw, + lpar: Default::default(), + rpar: Default::default(), + }) + } else if IMAGINARY_RE.is_match(raw) { + Expression::Imaginary(Imaginary { + value: raw, + lpar: Default::default(), + rpar: Default::default(), + }) + } else { + Expression::Integer(Integer { + value: raw, + lpar: Default::default(), + rpar: Default::default(), + }) + } +} diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs new file mode 100644 index 00000000..827e6b2d --- /dev/null +++ b/native/libcst/src/py.rs @@ -0,0 +1,25 @@ +use pyo3::prelude::*; + +#[pymodule] +#[pyo3(name = "native")] +pub fn libcst_native(_py: Python, m: &PyModule) -> PyResult<()> { + #[pyfn(m)] + fn parse_module(source: String, encoding: Option<&str>) -> PyResult { + let m = crate::parse_module(source.as_str(), encoding)?; + Python::with_gil(|py| Ok(m.into_py(py))) + } + + #[pyfn(m)] + fn parse_expression(source: String) -> PyResult { + let expr = crate::parse_expression(source.as_str())?; + Python::with_gil(|py| Ok(expr.into_py(py))) + } + + #[pyfn(m)] + fn parse_statement(source: String) -> PyResult { + let stm = crate::parse_statement(source.as_str())?; + Python::with_gil(|py| Ok(stm.into_py(py))) + } + + Ok(()) +} diff --git a/native/libcst/src/tokenizer/core/LICENSE b/native/libcst/src/tokenizer/core/LICENSE new file mode 100644 index 00000000..7e9199f0 --- /dev/null +++ b/native/libcst/src/tokenizer/core/LICENSE @@ -0,0 +1,46 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. diff --git a/native/libcst/src/tokenizer/core/README.md b/native/libcst/src/tokenizer/core/README.md new file mode 100644 index 00000000..dfef60f4 --- /dev/null +++ b/native/libcst/src/tokenizer/core/README.md @@ -0,0 +1,2 @@ +Files in this directory are a derivative of CPython's tokenizer, and are +therefore available under the PSF license. diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs new file mode 100644 index 00000000..7c9f0684 --- /dev/null +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -0,0 +1,1144 @@ +// This implementation is Copyright (c) Facebook, Inc. and its affiliates. +// +// CPython 3.10.0a5 and the original C code this is based on is +// Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved +// +// Portions of this module (f-string splitting) are based on parso's tokenize.py, which is also PSF +// licensed. + +/// A port of CPython's tokenizer.c to Rust, with the following significant modifications: +/// +/// - PEP 263 (encoding detection) support isn't implemented. We depend on other code to do this for +/// us right now, and expect that the input is utf-8 by the time we see it. +/// +/// - Removed support for tokenizing from a file handle without reading the whole file in at once. +/// This significantly complicates parsing and memory is cheap, so we require that the whole file +/// is read in and converted to a unicode string before tokenization can begin. +/// +/// - Removed support for the interactive interpreter parsing mode. +/// +/// - Tweaked the `translate_newlines` functionality and moved most of it into TextPosition. `\r` +/// characters are no longer removed from the input buffer, so strings may contain `\r` characters +/// that should be normalized prior to being interpreted. +/// +/// - Added support for tracking more detailed position information via TextPosition. 
As a +/// consequence, consuming and then backing up a character (`tok_nextc`/`tok_backup`) is more +/// expensive, and we prefer to call `TextPosition::peek()` instead. +/// +/// - Removed support for tokenizing type comments. +/// +/// - Reduced the number of different supported token types to match what parso's tokenizer yields. +/// +/// - Uses some regular expressions. Regular expression are a good fit for a tokenizer, but we don't +/// use regular expressions everywhere because we can't generate as good of error messages with +/// them. +/// +/// - Added support for breaking apart f-strings into multiple tokens, matching Parso's tokenizer +/// behavior. CPython instead runs the parser recursively to parse f-strings. +/// +/// Also, in general, the code is less tightly optimized. The CPython implementation is crazy +/// optimized in ways that wouldn't translate well to rust (e.g. it parses the input utf-8 buffer as +/// raw bytes instead of unicode codepoints). +/// +/// The implementation should still be faster than any pure-Python implementation, and most +/// optimizations (avoiding string copies when slicing) carry over to Rust very well. +/// +/// Planned (not yet implemented) features: +/// +/// - Add more feature flags to more closely match the behavior of older versions of Python 3.x. +/// +/// - Support for a Python 2 mode that tokenizes Python 2.7 code and fails on certain new Python 3 +/// syntax that wasn't supported in 2.7. +/// +/// - Maybe add back support for tokenizing type comments? +/// +/// This implementation is tailored to LibCST's needs. If you're looking for a more general-purpose +/// pure-Rust Python parser, consider using [RustPython's parser][]. +/// +/// [RustPython's parser]: https://crates.io/crates/rustpython-parser +mod string_types; + +use once_cell::sync::Lazy; +use regex::Regex; +use std::cell::RefCell; +use std::cmp::Ordering; +use std::convert::TryInto; +use std::fmt::Debug; +use std::fmt::Formatter; +use std::rc::Rc; + +use crate::tokenizer::{ + core::string_types::{FStringNode, StringQuoteChar, StringQuoteSize}, + operators::OPERATOR_RE, + text_position::{TextPosition, TextPositionSnapshot}, + whitespace_parser::State as WhitespaceState, +}; + +/// The maximum number of indentation levels at any given point in time. CPython's tokenizer.c caps +/// this to avoid the complexity of allocating a dynamic array, but we're using a Vec, so it's not +/// necessary, but we're keeping it to maintain compatibility. +const MAX_INDENT: usize = 100; + +// MAX_CHAR should be std::char::MAX once assoc_char_consts is stablized. 
+// https://github.com/rust-lang/rust/issues/71763 +const MAX_CHAR: char = '\u{10ffff}'; + +static SPACE_TAB_FORMFEED_RE: Lazy = Lazy::new(|| Regex::new(r"\A[ \f\t]+").expect("regex")); +static ANY_NON_NEWLINE_RE: Lazy = Lazy::new(|| Regex::new(r"\A[^\r\n]+").expect("regex")); +static STRING_PREFIX_RE: Lazy = + Lazy::new(|| Regex::new(r"\A(?i)(u|[bf]r|r[bf]|r|b|f)").expect("regex")); +static POTENTIAL_IDENTIFIER_TAIL_RE: Lazy = + Lazy::new(|| Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex")); +static DECIMAL_DOT_DIGIT_RE: Lazy = Lazy::new(|| Regex::new(r"\A\.[0-9]").expect("regex")); +static DECIMAL_TAIL_RE: Lazy = + Lazy::new(|| Regex::new(r"\A[0-9](_?[0-9])*").expect("regex")); +static HEXADECIMAL_TAIL_RE: Lazy = + Lazy::new(|| Regex::new(r"\A(_?[0-9a-fA-F])+").expect("regex")); +static OCTAL_TAIL_RE: Lazy = Lazy::new(|| Regex::new(r"\A(_?[0-7])+").expect("regex")); +static BINARY_TAIL_RE: Lazy = Lazy::new(|| Regex::new(r"\A(_?[01])+").expect("regex")); + +/// Used to verify identifiers when there's a non-ascii character in them. +// This changes across unicode revisions. We'd need to ship our own unicode tables to 100% match a +// given Python version's behavior. +static UNICODE_IDENTIFIER_RE: Lazy = + Lazy::new(|| Regex::new(r"\A[\p{XID_Start}_]\p{XID_Continue}*\z").expect("regex")); + +#[derive(Debug, Eq, PartialEq, Copy, Clone)] +pub enum TokType { + String, + Name, + Number, + Op, + Newline, + Indent, + Dedent, + Async, + Await, + // TODO; add support for these + #[allow(dead_code)] + FStringStart, + #[allow(dead_code)] + FStringString, + #[allow(dead_code)] + FStringEnd, + EndMarker, +} + +#[derive(Debug, thiserror::Error, Eq, PartialEq)] +pub enum TokError<'t> { + #[error("inconsistent mixing of tabs and spaces")] + TabSpace, + #[error("too many indentation levels")] + TooDeep, + #[error("no matching outer block for dedent")] + Dedent, + #[error("unexpected characters after a line continuation")] + LineContinuation, + #[error("unexpected end of file after a line continuation")] + LineContinuationEof, + #[error("{0:?} is not a valid identifier")] + BadIdentifier(&'t str), + #[error("invalid decimal literal")] + BadDecimal, + #[error( + "{}{}", + "leading zeros in decimal integer literals are not permitted; use an 0o prefix for octal ", + "integers" + )] + BadDecimalLeadingZeros, + #[error("invalid hexadecimal literal")] + BadHexadecimal, + #[error("invalid octal literal")] + BadOctal, + #[error("invalid digit {0:?} in octal literal")] + BadOctalDigit(char), + #[error("invalid binary literal")] + BadBinary, + #[error("invalid digit {0:?} in binary literal")] + BadBinaryDigit(char), + #[error("unterminated string literal")] + UnterminatedString, + #[error("unterminated triple-quoted string literal")] + UnterminatedTripleQuotedString, + #[error("unmatched {0:?}")] + UnmatchedClosingParen(char), + #[error("Closing parenthesis {1:?} does not match opening parenthesis {0:?}")] + MismatchedClosingParen(char, char), + #[error("Closing parenthesis {1:?} does not match opening parenthesis {0:?} on line {2:}")] + MismatchedClosingParenOnLine(char, char, usize), + #[error("{0:?} is not a valid character in this position")] + BadCharacter(char), +} + +// Clone is used for async_hacks, which needs to speculatively look-ahead one token. +#[derive(Clone)] +pub struct TokState<'t> { + /// The full program's source code (similar to `tok->str` or `tok->buf` in the CPython source + /// code). 
We don't support reading the file line-by-line from a file handle like CPython does, + /// so this is the whole program pre-converted to utf-8. + pub text_pos: TextPosition<'t>, + /// Start of the most recently returned token. + pub start_pos: TextPositionSnapshot, + /// True after we've encountered an error or there's no more text to process. + done: bool, + /// How many spaces a tab counts as (always 8) + tab_size: usize, + /// How many spaces a tab counts as in alt_indent_stack (always 1) + alt_tab_size: usize, + /// Stack of indentation levels where a tab is counted as 8 characters, used for tracking + /// dedents. Length is current indentation level. Should never have more than MAX_INDENT + /// entries. + indent_stack: Vec, + /// Used to check that tabs and spaces are not mixed. + alt_indent_stack: Vec, + /// Beginning of line. True if at the beginning of a new line. + at_bol: bool, + /// The number of bytes at the beginning of the line, as measured by consume_bol_whitespace. + /// Used by libcst to capture (and then validate and parse) the indentation. + pub bol_width: usize, + /// Set by `consume_bol_whitespace`, true if the current line is blank. + blank_line: bool, + /// Pending intents (if > 0) or dedents (if < 0). Used when multiple tokens need to be produced + /// at once. + pending_indents: i32, + /// Length is `() [] {}` parenthesis nesting level. Used to allow free continuations inside + /// them. Stack entries are to verify that closing parenthesis match opening parenthesis. + /// Tuple is (character, lineno). + paren_stack: Vec<(char, usize)>, + /// Whether we're in a continuation line. + cont_line: bool, + + /// True if async/await aren't always keywords. + async_hacks: bool, + /// True if tokens are inside an 'async def' body. + async_def: bool, + /// Indentation level of the outermost 'async def'. + async_def_indent: usize, + /// True if the outermost 'async def' had at least one NEWLINE token after it. + async_def_nl: bool, + + /// Splits f-strings into multiple tokens instead of a STRING token if true. + /// + /// CPython doesn't directly split f-strings in the tokenizer (and therefore doesn't support + /// this option). Instead, when the parser encounters an f-string, it recursively re-runs the + /// tokenizer and parser. + /// + /// Supporting this at the tokenizer-level is pretty nasty and adds a lot of complexity. + /// Eventually, we should probably support this at the parser-level instead. + split_fstring: bool, + fstring_stack: Vec, + + missing_nl_before_eof: bool, +} + +pub struct TokConfig { + /// Used in Python 3.5 and 3.6. If enabled, async/await are sometimes keywords and sometimes + /// identifiers, depending on if they're being used in the context of an async function. This + /// breaks async comprehensions outside of async functions. 
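+    /// For example (illustrative), with this flag enabled `async` in a module-level
+    /// `[x async for x in y]` is tokenized as a plain Name rather than Async, which
+    /// is what breaks async comprehensions outside of `async def` bodies.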
+ pub async_hacks: bool, + pub split_fstring: bool, + // Not currently supported: + // type_comments: bool, +} + +fn is_digit>>(ch: C) -> bool { + matches!(ch.into(), Some('0'..='9')) +} + +#[derive(Debug)] +enum NumberState { + StartDigit, + Fraction, + Exponent, + Imaginary, +} + +impl<'t> TokState<'t> { + pub fn new(text: &'t str, config: &TokConfig) -> Self { + let text_pos = TextPosition::new(text); + let start_pos = (&text_pos).into(); + Self { + text_pos, + start_pos, + done: false, + tab_size: 8, + alt_tab_size: 1, + indent_stack: Vec::new(), + alt_indent_stack: Vec::new(), + at_bol: true, + bol_width: 0, + blank_line: false, + pending_indents: 0, + paren_stack: Vec::new(), + cont_line: false, + async_hacks: config.async_hacks, + async_def: false, + async_def_indent: 0, + async_def_nl: false, + split_fstring: config.split_fstring, + fstring_stack: Vec::new(), + missing_nl_before_eof: text.is_empty() || text.as_bytes()[text.len() - 1] != b'\n', + } + } + + pub fn is_parenthesized(&self) -> bool { + !self.paren_stack.is_empty() + } + + /// Implementation of `next()`, wrapped by next() to allow for easier error handling. Roughly + /// equivalent to `tok_get` in the C source code. + fn next_inner(&mut self) -> Result> { + if self.split_fstring { + if let Some(tos) = self.fstring_stack.last() { + if !tos.is_in_expr() { + self.start_pos = (&self.text_pos).into(); + let is_in_format_spec = tos.is_in_format_spec(); + let is_raw_string = tos.is_raw_string; + if let Some(tok) = + self.maybe_consume_fstring_string(is_in_format_spec, is_raw_string)? + { + return Ok(tok); + } + if let Some(tok) = self.maybe_consume_fstring_end() { + return Ok(tok); + } + } + } + } + + // This will never consume a token, but it may set blank_line and it may set + // pending_indents. + self.consume_bol_whitespace()?; + + // Return pending indents/dedents + if let Some(t) = self.process_pending_indents() { + self.start_pos = (&self.text_pos).into(); + return Ok(t); + } + + self.maybe_close_async_def(); + + 'again: loop { + // Skip spaces + self.text_pos.consume(&*SPACE_TAB_FORMFEED_RE); + + // Skip comment, unless it's a type comment + if self.text_pos.peek() == Some('#') { + self.text_pos.consume(&*ANY_NON_NEWLINE_RE); + // type_comment is not supported + } + + // Set start of current token + self.start_pos = (&self.text_pos).into(); + + return match self.text_pos.peek() { + // Check for EOF now + None => { + if self.missing_nl_before_eof + && self.text_pos.byte_column_number() != self.bol_width + && !self.blank_line + { + self.at_bol = true; + self.missing_nl_before_eof = false; + Ok(TokType::Newline) + } else { + Ok(TokType::EndMarker) + } + } + + // Identifier (most frequent token!) 
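+                // Note: prefixed strings (e.g. `r"..."`, `b"..."`, `f"..."`) also enter
+                // this arm, because a string prefix initially looks like an identifier;
+                // consume_identifier_or_prefixed_string tells the two cases apart.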
+ Some('a'..='z') | Some('A'..='Z') | Some('_') | Some('\u{80}'..=MAX_CHAR) => { + self.consume_identifier_or_prefixed_string() + } + + // Newline + Some('\n') => { + self.text_pos.next(); + self.at_bol = true; + if self.split_fstring + && !self.fstring_stack.iter().all(|node| node.allow_multiline()) + { + Err(TokError::UnterminatedString) + } else if self.blank_line || !self.paren_stack.is_empty() { + // this newline doesn't count + // recurse (basically `goto nextline`) + self.next_inner() + } else { + self.cont_line = false; + if self.async_def { + self.async_def_nl = true; + } + Ok(TokType::Newline) + } + } + + // Ellipsis + Some('.') if self.text_pos.consume("...") => { + return Ok(TokType::Op); + } + + // Number starting with period + Some('.') if self.text_pos.matches(&*DECIMAL_DOT_DIGIT_RE) => { + self.consume_number(NumberState::Fraction) + } + + // Dot + Some('.') => { + self.text_pos.next(); + Ok(TokType::Op) + } + + // Number + Some('0'..='9') => self.consume_number(NumberState::StartDigit), + + // String + Some('\'') | Some('"') => self.consume_string(), + + // Line continuation + Some('\\') => { + self.text_pos.next(); + if let Some('\n') = self.text_pos.next() { + if self.text_pos.peek() == None { + Err(TokError::LineContinuationEof) + } else { + self.cont_line = true; + // Read next line + continue 'again; + } + } else { + Err(TokError::LineContinuation) + } + } + + Some(ch @ '(') | Some(ch @ '[') | Some(ch @ '{') => { + self.text_pos.next(); + if let Some(tos) = self.fstring_stack.last_mut() { + tos.open_parentheses(); + } + self.paren_stack.push((ch, self.text_pos.line_number())); + Ok(TokType::Op) + } + + Some(closing @ ')') | Some(closing @ ']') | Some(closing @ '}') => { + self.text_pos.next(); + if let Some(tos) = self.fstring_stack.last_mut() { + tos.close_parentheses(); + } + if let Some((opening, line_number)) = self.paren_stack.pop() { + match (opening, closing) { + ('(', ')') | ('[', ']') | ('{', '}') => Ok(TokType::Op), + _ => { + if line_number != self.text_pos.line_number() { + Err(TokError::MismatchedClosingParenOnLine( + opening, + closing, + line_number, + )) + } else { + Err(TokError::MismatchedClosingParen(opening, closing)) + } + } + } + } else { + Err(TokError::UnmatchedClosingParen(closing)) + } + } + + Some(':') + if self + .fstring_stack + .last() + .map(|tos| tos.parentheses_count - tos.format_spec_count == 1) + .unwrap_or(false) => + { + // N.B. This may capture the walrus operator and pass it to the formatter. + // That's intentional. PEP 572 says: "Assignment expressions inside of f-strings + // require parentheses." + // + // >>> f'{x:=10}' # Valid, passes '=10' to formatter + let tos = self + .fstring_stack + .last_mut() + .expect("fstring_stack is not empty"); + tos.format_spec_count += 1; + self.text_pos.next(); + Ok(TokType::Op) + } + + // Operator + Some(_) if self.text_pos.consume(&*OPERATOR_RE) => Ok(TokType::Op), + + // Bad character + // If nothing works, fall back to this error. CPython returns an OP in this case, + // and then just relies on the parser to generate a generic syntax error. + Some(ch) => Err(TokError::BadCharacter(ch)), + }; + } + } + + /// Consumes the whitespace (and comments) at the beginning of the line. May emit an error. Will + /// mutate `pending_indents`, so you must check `pending_indents` after calling this. 
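+    /// For example (illustrative): a line indented with one tab and two spaces is
+    /// measured as col 10 (a tab snaps to the next multiple of 8) but altcol 3 (a tab
+    /// counts as 1); if a sibling line reaches col 10 using only spaces, the altcol
+    /// values disagree and TokError::TabSpace is returned.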
+ fn consume_bol_whitespace(&mut self) -> Result<(), TokError<'t>> { + self.blank_line = false; + if !self.at_bol { + return Ok(()); + } + + let mut col = 0; // column where tab counts as 8 characters + let mut altcol = 0; // column where tab counts as 1 character + self.at_bol = false; + self.bol_width = 0; + + // consume space, tab, and formfeed characters + loop { + match self.text_pos.peek() { + Some(' ') => { + col += 1; + altcol += 1; + self.bol_width += 1; + self.text_pos.next(); + } + Some('\t') => { + // Increment both col and altcol using different tab sizes. Tabs snap to the + // next multiple of self.tab_size. + col = (col / self.tab_size + 1) * self.tab_size; + // altcol will later be used for detecting mixed tabs and spaces. + altcol = (altcol / self.alt_tab_size + 1) * self.alt_tab_size; + self.bol_width += 1; + self.text_pos.next(); + } + // Control-L (formfeed) for emacs users + Some('\x0c') => { + col = 0; + altcol = 0; + self.bol_width += 1; + self.text_pos.next(); + } + _ => { + break; + } + } + } + + // Lines with only whitespace and/or comments and/or a line continuation character shouldn't + // affect the indentation and are not passed to the parser as NEWLINE tokens. + self.blank_line = matches!(self.text_pos.peek(), Some('#') | Some('\n') | Some('\\')); + + if self.blank_line || !self.paren_stack.is_empty() { + return Ok(()); + } + + let prev_col = self.indent_stack.last().unwrap_or(&0); + match col.cmp(prev_col) { + Ordering::Equal => { + // No change + if altcol != *self.alt_indent_stack.last().unwrap_or(&0) { + return Err(TokError::TabSpace); + } + } + Ordering::Greater => { + // col > prev_col + // Indent -- always one + if self.indent_stack.len() + 1 >= MAX_INDENT { + return Err(TokError::TooDeep); + } + // col > prev_col, therefore altcol > prev_altcol, unless there's badly mixed tabs + // and spaces + if altcol <= *self.alt_indent_stack.last().unwrap_or(&0) { + return Err(TokError::TabSpace); + } + // only emit indents if we're not at EOF + if self.text_pos.peek().is_some() { + self.pending_indents += 1; + self.indent_stack.push(col); + self.alt_indent_stack.push(altcol); + } + } + Ordering::Less => { + // c < prev_col + // Dedent -- any number, must be consistent + while matches!(self.indent_stack.last(), Some(&ind_cols) if col < ind_cols) { + self.pending_indents -= 1; + self.indent_stack.pop(); + self.alt_indent_stack.pop(); + } + if col != *self.indent_stack.last().unwrap_or(&0) { + return Err(TokError::Dedent); + } + if altcol != *self.alt_indent_stack.last().unwrap_or(&0) { + return Err(TokError::TabSpace); + } + } + } + + Ok(()) + } + + fn process_pending_indents(&mut self) -> Option { + if self.pending_indents != 0 { + if self.pending_indents < 0 { + self.pending_indents += 1; + Some(TokType::Dedent) + } else { + self.pending_indents -= 1; + Some(TokType::Indent) + } + } else { + None + } + } + + fn maybe_close_async_def(&mut self) { + // Check if we are closing an async function + if self.async_def + && !self.blank_line + // (This is irrelevant to the rust implementation which doesn't support type_comments + // yet, but the comment is preserved for posterity) + // Due to some implementation artifacts of type comments, a TYPE_COMMENT at the start of + // a function won't set an indentation level and it will produce a NEWLINE after it. To + // avoid spuriously ending an async function due to this, wait until we have some + // non-newline char in front of us. 
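+ // In short: once we are outside any parentheses, past the header's NEWLINE, and
+ // back at (or above) the indentation where the `async def` appeared, the async
+ // context is considered closed.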
+ // && self.text_pos.peek() == Some('\n') + && self.paren_stack.is_empty() + // There was a NEWLINE after ASYNC DEF, so we're past the signature. + && self.async_def_nl + // Current indentation level is less than where the async function was defined + && self.async_def_indent >= self.indent_stack.len() + { + self.async_def = false; + self.async_def_indent = 0; + self.async_def_nl = false; + } + } + + fn consume_identifier_or_prefixed_string(&mut self) -> Result> { + // Process the various legal combinations of b"", r"", u"", and f"". + if self.text_pos.consume(&*STRING_PREFIX_RE) { + if let Some('"') | Some('\'') = self.text_pos.peek() { + // We found a string, not an identifier. Bail! + if self.split_fstring + && self + .text_pos + .slice_from_start_pos(&self.start_pos) + .contains(&['f', 'F'][..]) + { + return self.consume_fstring_start(); + } else { + return self.consume_string(); + } + } + } else { + // the next character must be a potential identifier start, aka `[a-zA-Z_]|[^\x00-\x7f]` + let first_ch = self.text_pos.next(); + debug_assert!(matches!( + first_ch, + Some('a'..='z') | Some('A'..='Z') | Some('_') | Some('\u{80}'..=MAX_CHAR) + )); + } + self.text_pos.consume(&*POTENTIAL_IDENTIFIER_TAIL_RE); + let identifier_str = self.text_pos.slice_from_start_pos(&self.start_pos); + if !verify_identifier(identifier_str) { + // TODO: async/await + return Err(TokError::BadIdentifier(identifier_str)); + } + + let allow_async = !self.async_hacks || self.async_def; + match (identifier_str, allow_async) { + ("async", true) => Ok(TokType::Async), + ("await", true) => Ok(TokType::Await), + ("async", false) => { + // The current token is 'async' and async_hacks is enabled. + // Look ahead one token to see if that is 'def'. + // This clone is expensive, but modern code doesn't need async_hacks. + let mut lookahead_state = self.clone(); + if lookahead_state.next_inner() == Ok(TokType::Name) + && lookahead_state + .text_pos + .slice_from_start_pos(&lookahead_state.start_pos) + == "def" + { + self.async_def = true; + self.async_def_indent = self.indent_stack.len(); + Ok(TokType::Async) + } else { + Ok(TokType::Name) + } + } + _ => Ok(TokType::Name), + } + } + + fn consume_number(&mut self, state: NumberState) -> Result> { + // This is organized as a state machine. The match could also be rewritten into multiple + // functions, but this is closer to how the C code is written (with gotos). 
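+ // As an illustration, `1.5e3j` is consumed as StartDigit -> Fraction -> Exponent
+ // -> Imaginary, each state handling its part of the literal before delegating.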
+ match state { + NumberState::StartDigit => { + let start_digit_ch = self.text_pos.peek(); + debug_assert!(is_digit(start_digit_ch)); + + if start_digit_ch == Some('0') { + self.text_pos.next(); + match self.text_pos.peek() { + Some('x') | Some('X') => { + self.text_pos.next(); + if !self.text_pos.consume(&*HEXADECIMAL_TAIL_RE) + || self.text_pos.peek() == Some('_') + { + Err(TokError::BadHexadecimal) + } else { + Ok(TokType::Number) + } + } + Some('o') | Some('O') => { + self.text_pos.next(); + if !self.text_pos.consume(&*OCTAL_TAIL_RE) + || self.text_pos.peek() == Some('_') + { + return Err(TokError::BadOctal); + } + if let Some(next_ch) = self.text_pos.peek() { + if is_digit(next_ch) { + return Err(TokError::BadOctalDigit(next_ch)); + } + } + Ok(TokType::Number) + } + Some('b') | Some('B') => { + self.text_pos.next(); + if !self.text_pos.consume(&*BINARY_TAIL_RE) + || self.text_pos.peek() == Some('_') + { + return Err(TokError::BadBinary); + } + if let Some(next_ch) = self.text_pos.peek() { + if is_digit(next_ch) { + return Err(TokError::BadBinaryDigit(next_ch)); + } + } + Ok(TokType::Number) + } + _ => { + let mut nonzero = false; + // Maybe old-style octal. In any case, allow '0' as a literal + loop { + if self.text_pos.peek() == Some('_') { + self.text_pos.next(); + if !is_digit(self.text_pos.peek()) { + return Err(TokError::BadDecimal); + } + } + if self.text_pos.peek() != Some('0') { + break; + } + self.text_pos.next(); + } + if is_digit(self.text_pos.peek()) { + nonzero = true; + self.consume_decimal_tail()?; + } + if self.text_pos.peek() == Some('.') { + self.consume_number(NumberState::Fraction) + } else if let Some('e') | Some('E') = self.text_pos.peek() { + self.consume_number(NumberState::Exponent) + } else if let Some('j') | Some('J') = self.text_pos.peek() { + self.consume_number(NumberState::Imaginary) + } else if nonzero { + Err(TokError::BadDecimalLeadingZeros) + } else { + Ok(TokType::Number) + } + } + } + } else { + self.consume_decimal_tail()?; + if self.text_pos.peek() == Some('.') { + self.consume_number(NumberState::Fraction) + } else if let Some('e') | Some('E') = self.text_pos.peek() { + self.consume_number(NumberState::Exponent) + } else if let Some('j') | Some('J') = self.text_pos.peek() { + self.consume_number(NumberState::Imaginary) + } else { + Ok(TokType::Number) + } + } + } + NumberState::Fraction => { + let dot_ch = self.text_pos.next(); + debug_assert!(dot_ch == Some('.')); + + if is_digit(self.text_pos.peek()) { + self.consume_decimal_tail()?; + } + if let Some('e') | Some('E') = self.text_pos.peek() { + self.consume_number(NumberState::Exponent) + } else if let Some('j') | Some('J') = self.text_pos.peek() { + self.consume_number(NumberState::Imaginary) + } else { + Ok(TokType::Number) + } + } + NumberState::Exponent => { + let e_ch = self.text_pos.next(); + debug_assert!(matches!(e_ch, Some('e') | Some('E'))); + + if let Some('+') | Some('-') = self.text_pos.peek() { + self.text_pos.next(); + if !is_digit(self.text_pos.peek()) { + return Err(TokError::BadDecimal); + } + } else if !is_digit(self.text_pos.peek()) { + // Don't consume the 'e'. It could be part of an identifier after this number. 
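+ // e.g. `1e_x` backs up here and tokenizes as the number `1` followed by the
+ // name `e_x`.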
+ self.text_pos.backup_no_newline(); + return Ok(TokType::Number); + } + self.consume_decimal_tail()?; + if let Some('j') | Some('J') = self.text_pos.peek() { + self.consume_number(NumberState::Imaginary) + } else { + Ok(TokType::Number) + } + } + NumberState::Imaginary => { + let j_ch = self.text_pos.next(); + debug_assert!(matches!(j_ch, Some('j') | Some('J'))); + + Ok(TokType::Number) + } + } + } + + /// Processes a decimal tail. This is the bit after the dot or after an E in a float. + fn consume_decimal_tail(&mut self) -> Result<(), TokError<'t>> { + let result = self.text_pos.consume(&*DECIMAL_TAIL_RE); + // Assumption: If we've been called, the first character is an integer, so we must have a + // regex match + debug_assert!(result, "try_decimal_tail was called on a non-digit char"); + if self.text_pos.peek() == Some('_') { + Err(TokError::BadDecimal) + } else { + Ok(()) + } + } + + fn consume_open_quote(&mut self) -> (StringQuoteChar, StringQuoteSize) { + let quote_char: StringQuoteChar = self + .text_pos + .peek() + .try_into() + .expect("the next character must be a quote when calling consume_open_quote"); + let triple_quote_pattern = quote_char.triple_str(); + let quote_size = if self.text_pos.consume(triple_quote_pattern) { + StringQuoteSize::Triple + } else { + self.text_pos.next(); // consume the single character instead + StringQuoteSize::Single + }; + (quote_char, quote_size) + } + + fn consume_string(&mut self) -> Result> { + // Assumption: The opening quote has not been consumed. Leading characters (b, r, f, etc) + // have been consumed. + let (quote_char, quote_size) = self.consume_open_quote(); + let quote_raw = quote_char.into(); + + let mut end_quote_size: usize = 0; + let quote_usize: usize = quote_size.into(); + while end_quote_size != quote_usize { + match (self.text_pos.next(), quote_size) { + (None, StringQuoteSize::Triple) => { + return Err(TokError::UnterminatedTripleQuotedString); + } + (None, StringQuoteSize::Single) | (Some('\n'), StringQuoteSize::Single) => { + return Err(TokError::UnterminatedString); + } + (ch @ Some('\''), _) | (ch @ Some('"'), _) if ch == Some(quote_raw) => { + end_quote_size += 1; + } + (Some(ch), _) => { + end_quote_size = 0; + if ch == '\\' { + // skip escaped char + self.text_pos.next(); + } + } + } + } + + Ok(TokType::String) + } + + fn consume_fstring_start(&mut self) -> Result> { + let (quote_char, quote_size) = self.consume_open_quote(); + let is_raw_string = self + .text_pos + .slice_from_start_pos(&self.start_pos) + .contains(&['r', 'R'][..]); + self.fstring_stack + .push(FStringNode::new(quote_char, quote_size, is_raw_string)); + Ok(TokType::FStringStart) + } + + fn maybe_consume_fstring_string( + &mut self, + is_in_format_spec: bool, + is_raw_string: bool, + ) -> Result, TokError<'t>> { + let allow_multiline = self.fstring_stack.iter().all(|node| node.allow_multiline()); + let mut in_named_unicode: bool = false; + let mut ok_result = Ok(None); // value to return if we reach the end and don't error out + 'outer: loop { + match (self.text_pos.peek(), allow_multiline) { + (None, true) => { + return Err(TokError::UnterminatedTripleQuotedString); + } + (None, false) | (Some('\n'), false) => { + return Err(TokError::UnterminatedString); + } + (ch @ Some('\''), _) | (ch @ Some('"'), _) => { + // see if this actually terminates something in fstring_stack + for node in self.fstring_stack.iter() { + if ch == Some(node.quote_char.into()) { + match node.quote_size { + StringQuoteSize::Single => { + break 'outer; + } + 
StringQuoteSize::Triple => { + if self.text_pos.matches(node.quote_char.triple_str()) { + break 'outer; + } + } + } + } + } + self.text_pos.next(); + } + (Some('\\'), _) if !is_raw_string => { + self.text_pos.next(); + if is_in_format_spec { + if let Some('{') | Some('}') = self.text_pos.peek() { + // don't consume { or } because we want those to be interpreted as OP + // tokens + } else { + // skip escaped char (e.g. \', \", or newline/line continuation) + self.text_pos.next(); + } + } else { + // skip escaped char + let next_ch = self.text_pos.next(); + // check if this is a \N sequence + if let Some('N') = next_ch { + // swallow the next open curly brace if it exists + if let Some('{') = self.text_pos.peek() { + in_named_unicode = true; + self.text_pos.next(); + } + } + } + } + (Some('{'), _) => { + if is_in_format_spec { + // don't actually consume the {, and generate an OP for it instead + break 'outer; + } + let consumed_double = self.text_pos.consume("{{"); + if !consumed_double { + break 'outer; + } + } + (Some('}'), _) => { + if in_named_unicode { + in_named_unicode = false; + self.text_pos.next(); + } else if is_in_format_spec { + // don't actually consume the }, and generate an OP for it instead + break 'outer; + } else if !self.text_pos.consume("}}") { + return Err(TokError::UnmatchedClosingParen('}')); + } + } + _ => { + self.text_pos.next(); + } + } + ok_result = Ok(Some(TokType::FStringString)); + } + ok_result + } + + fn maybe_consume_fstring_end(&mut self) -> Option { + let ch = self.text_pos.peek(); + let mut match_idx = None; + for (idx, node) in self.fstring_stack.iter().enumerate() { + if ch == Some(node.quote_char.into()) { + if node.quote_size == StringQuoteSize::Triple { + if self.text_pos.consume(node.quote_char.triple_str()) { + match_idx = Some(idx); + break; + } + } else { + self.text_pos.next(); // already matched + match_idx = Some(idx); + break; + } + } + } + if let Some(match_idx) = match_idx { + self.fstring_stack.truncate(match_idx); + Some(TokType::FStringEnd) + } else { + None + } + } +} + +impl<'t> Iterator for TokState<'t> { + type Item = Result>; + + /// Returns the next token type. + fn next(&mut self) -> Option>> { + // This implementation wraps `next_inner`, which does the actual work. + if self.done { + None + } else { + match self.next_inner() { + Err(err) => { + self.done = true; + Some(Err(err)) + } + Ok(TokType::EndMarker) => { + self.done = true; + Some(Ok(TokType::EndMarker)) + } + Ok(t) => Some(Ok(t)), + } + } + } +} + +/// Returns true if the given string is a valid Python 3.x identifier. Follows [PEP 3131][]. +/// +/// [PEP 3131]: https://www.python.org/dev/peps/pep-3131/ +fn verify_identifier(name: &str) -> bool { + // TODO: If `name` is non-ascii, must first normalize name to NFKC. + // Common case: If the entire string is ascii, we can avoid the more expensive regex check, + // since the tokenizer already validates ascii characters before calling us. 
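+ // Non-ascii identifiers (e.g. `café`) fall through to the regex check below.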
+ name.is_ascii() || UNICODE_IDENTIFIER_RE.is_match(name) +} + +#[derive(Clone)] +pub struct Token<'a> { + pub r#type: TokType, + pub string: &'a str, + pub start_pos: TextPositionSnapshot, + pub end_pos: TextPositionSnapshot, + pub whitespace_before: Rc>>, + pub whitespace_after: Rc>>, + pub relative_indent: Option<&'a str>, +} + +impl<'a> Debug for Token<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { + write!( + f, + "Token({:?}, {}, start={:?}, end={:?}, relative_indent={:?}, ws_before={:?}, ws_after={:?}", + self.r#type, self.string, self.start_pos, self.end_pos, self.relative_indent, self.whitespace_before, self.whitespace_after + ) + } +} + +// Dummy Eq implementation. We never compare Tokens like this +impl<'a> PartialEq for Token<'a> { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl<'a> Eq for Token<'a> {} + +pub struct TokenIterator<'a> { + previous_whitespace: Option>>>, + core_state: TokState<'a>, + absolute_indents: Vec<&'a str>, +} + +impl<'a> TokenIterator<'a> { + pub fn new(module_text: &'a str, config: &TokConfig) -> Self { + Self { + previous_whitespace: None, + absolute_indents: vec![], + core_state: TokState::new(module_text, config), + } + } +} + +impl<'a> Iterator for TokenIterator<'a> { + type Item = Result, TokError<'a>>; + + fn next(&mut self) -> Option { + let next = self.core_state.next(); + next.as_ref()?; + Some((|| { + let tok_type = next.unwrap()?; + let relative_indent = match tok_type { + TokType::Indent => { + let end_idx = self.core_state.text_pos.byte_idx(); + let start_idx = end_idx - self.core_state.bol_width; + let absolute_indent = &self.core_state.text_pos.text()[start_idx..end_idx]; + let relative_indent = + if let Some(prev_absolute_indent) = self.absolute_indents.last() { + if let Some(ri) = absolute_indent.strip_prefix(prev_absolute_indent) { + ri + } else { + // TODO: return the correct exception type, improve error message + return Err(TokError::Dedent); + } + } else { + // there's no previous indent, absolute_indent is relative_indent + absolute_indent + }; + self.absolute_indents.push(absolute_indent); + // HACKY: mutate and fixup the previous whitespace state + if let Some(ws) = self.previous_whitespace.as_mut() { + ws.borrow_mut().absolute_indent = absolute_indent; + } + Some(relative_indent) + } + TokType::Dedent => { + self.absolute_indents.pop(); + // HACKY: mutate and fixup the previous whitespace state + if let Some(ws) = self.previous_whitespace.as_mut() { + ws.borrow_mut().absolute_indent = + self.absolute_indents.last().unwrap_or(&""); + } + None + } + _ => None, + }; + let text_pos = &self.core_state.text_pos; + let whitespace_before = self.previous_whitespace.clone().unwrap_or_default(); + let whitespace_after = match tok_type { + TokType::Indent | TokType::Dedent | TokType::EndMarker => whitespace_before.clone(), + _ => Rc::new(RefCell::new(WhitespaceState { + line: text_pos.line_number(), + column: text_pos.char_column_number(), + column_byte: text_pos.byte_column_number(), + byte_offset: text_pos.byte_idx(), + absolute_indent: self.absolute_indents.last().unwrap_or(&""), + is_parenthesized: self.core_state.is_parenthesized(), + })), + }; + self.previous_whitespace = Some(whitespace_after.clone()); + + Ok(Token { + r#type: tok_type, + string: text_pos.slice_from_start_pos(&self.core_state.start_pos), + start_pos: self.core_state.start_pos.clone(), + end_pos: text_pos.into(), + whitespace_after: whitespace_after.clone(), + whitespace_before: whitespace_before.clone(), + 
relative_indent, + }) + })()) + } +} diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs new file mode 100644 index 00000000..0d14d1e8 --- /dev/null +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -0,0 +1,119 @@ +// This implementation is Copyright (c) Facebook, Inc. and its affiliates. +// +// CPython 3.10.0a5 and the original C code this is based on is +// Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved +// +// Portions of this module (f-string splitting) are based on parso's tokenize.py, which is also PSF +// licensed. + +/// Helper types for string processing in the core tokenizer. +use std::convert::TryFrom; + +use crate::tokenizer::text_position::TextPositionSnapshot; + +#[derive(Clone, Copy, Eq, PartialEq)] +pub enum StringQuoteSize { + Single, + Triple, +} + +impl From for usize { + fn from(qs: StringQuoteSize) -> Self { + match qs { + StringQuoteSize::Single => 1, + StringQuoteSize::Triple => 3, + } + } +} + +#[derive(Clone, Copy)] +pub enum StringQuoteChar { + Apostrophe, + DoubleQuote, +} + +impl StringQuoteChar { + pub fn triple_str(&self) -> &'static str { + match self { + Self::Apostrophe => "'''", + Self::DoubleQuote => "\"\"\"", + } + } +} + +impl From for char { + fn from(ch: StringQuoteChar) -> Self { + match ch { + StringQuoteChar::Apostrophe => '\'', + StringQuoteChar::DoubleQuote => '"', + } + } +} + +#[derive(Debug, thiserror::Error)] +#[error("{0:?} is not a valid string quote character")] +pub struct StringQuoteCharConversionError(Option); + +impl TryFrom> for StringQuoteChar { + type Error = StringQuoteCharConversionError; + + fn try_from(ch: Option) -> Result { + match ch { + Some('\'') => Ok(StringQuoteChar::Apostrophe), + Some('"') => Ok(StringQuoteChar::DoubleQuote), + _ => Err(StringQuoteCharConversionError(ch)), + } + } +} + +#[derive(Clone)] +pub struct FStringNode { + pub quote_char: StringQuoteChar, + pub quote_size: StringQuoteSize, + pub parentheses_count: usize, + pub string_start: Option, + // In the syntax there can be multiple format_spec's nested: {x:{y:3}} + pub format_spec_count: usize, + pub is_raw_string: bool, +} + +impl FStringNode { + pub fn new( + quote_char: StringQuoteChar, + quote_size: StringQuoteSize, + is_raw_string: bool, + ) -> Self { + Self { + quote_char, + quote_size, + parentheses_count: 0, + string_start: None, + format_spec_count: 0, + is_raw_string, + } + } + + pub fn open_parentheses(&mut self) { + self.parentheses_count += 1; + } + + pub fn close_parentheses(&mut self) { + self.parentheses_count -= 1; + if self.parentheses_count == 0 { + // No parentheses means that the format spec is also finished. + self.format_spec_count = 0; + } + } + + pub fn allow_multiline(&self) -> bool { + self.quote_size == StringQuoteSize::Triple + } + + pub fn is_in_expr(&self) -> bool { + self.parentheses_count > self.format_spec_count + } + + pub fn is_in_format_spec(&self) -> bool { + !self.is_in_expr() && self.format_spec_count > 0 + } +} diff --git a/native/libcst/src/tokenizer/debug_utils.rs b/native/libcst/src/tokenizer/debug_utils.rs new file mode 100644 index 00000000..8e646f8d --- /dev/null +++ b/native/libcst/src/tokenizer/debug_utils.rs @@ -0,0 +1,16 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use std::fmt; + +/// An empty struct that when writes "..." when using `fmt::Debug`. 
Useful for omitting fields when +/// using `fmt::Formatter::debug_struct`. +pub struct EllipsisDebug; + +impl fmt::Debug for EllipsisDebug { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("...") + } +} diff --git a/native/libcst/src/tokenizer/mod.rs b/native/libcst/src/tokenizer/mod.rs new file mode 100644 index 00000000..e900f511 --- /dev/null +++ b/native/libcst/src/tokenizer/mod.rs @@ -0,0 +1,15 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +mod core; +mod debug_utils; +mod operators; +mod text_position; +pub mod whitespace_parser; + +pub use self::core::*; + +#[cfg(test)] +mod tests; diff --git a/native/libcst/src/tokenizer/operators.rs b/native/libcst/src/tokenizer/operators.rs new file mode 100644 index 00000000..e3098cfa --- /dev/null +++ b/native/libcst/src/tokenizer/operators.rs @@ -0,0 +1,85 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. +// +// Part of this file is derived from the CPython documentation, which is available under the +// zero-clause BSD license. That license does not require that derivative works cite the original +// code or that we retain the original work's copyright information. +// https://docs.python.org/3/license.html#zero-clause-bsd-license-for-code-in-the-python-release-documentation + +use once_cell::sync::Lazy; +use regex::Regex; + +/// A list of strings that make up all the possible operators in a specific version of Python. +/// Derived from the [CPython's token documentation](https://docs.python.org/3/library/token.html). +pub const OPERATORS: &[&str] = &[ + "(", // LPAR + ")", // RPAR + "[", // LSQB + "]", // RSQB + ":", // COLON + ",", // COMMA + ";", // SEMI + "+", // PLUS + "-", // MINUS + "*", // STAR + "/", // SLASH + "|", // VBAR + "&", // AMPER + "<", // LESS + ">", // GREATER + "=", // EQUAL + ".", // DOT + "%", // PERCENT + "{", // LBRACE + "}", // RBRACE + "==", // EQEQUAL + "!=", // NOTEQUAL + "<=", // LESSEQUAL + ">=", // GREATEREQUAL + "~", // TILDE + "^", // CIRCUMFLEX + "<<", // LEFTSHIFT + ">>", // RIGHTSHIFT + "**", // DOUBLESTAR + "+=", // PLUSEQUAL + "-=", // MINEQUAL + "*=", // STAREQUAL + "/=", // SLASHEQUAL + "%=", // PERCENTEQUAL + "&=", // AMPEREQUAL + "|=", // VBAREQUAL + "^=", // CIRCUMFLEXEQUAL + "<<=", // LEFTSHIFTEQUAL + ">>=", // RIGHTSHIFTEQUAL + "**=", // DOUBLESTAREQUAL + "//", // DOUBLESLASH + "//=", // DOUBLESLASHEQUAL + "@", // AT + "@=", // ATEQUAL + "->", // RARROW + "...", // ELLIPSIS + ":=", // COLONEQUAL + // Not a real operator, but needed to support the split_fstring feature + "!", + // The fake operator added by PEP 401. 
Technically only valid if used with: + // + // from __future__ import barry_as_FLUFL + "<>", +]; + +pub static OPERATOR_RE: Lazy = Lazy::new(|| { + // sort operators so that we try to match the longest ones first + let mut sorted_operators: Box<[&str]> = OPERATORS.into(); + sorted_operators.sort_unstable_by_key(|op| usize::MAX - op.len()); + Regex::new(&format!( + r"\A({})", + sorted_operators + .iter() + .map(|op| regex::escape(op)) + .collect::>() + .join("|") + )) + .expect("regex") +}); diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs new file mode 100644 index 00000000..d839e815 --- /dev/null +++ b/native/libcst/src/tokenizer/tests.rs @@ -0,0 +1,689 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +/// Tests for the functionality in `tokenize::core`. These tests are not part of the `core` module +/// because they're not a derivative work of CPython, and are therefore not subject to the PSF +/// license. +use crate::tokenizer::core::{TokConfig, TokError, TokState, TokType}; + +fn default_config() -> TokConfig { + TokConfig { + async_hacks: false, + split_fstring: false, + } +} + +fn tokenize_with_end_marker<'t>( + text: &'t str, + config: &TokConfig, +) -> Result, TokError<'t>> { + let mut result = Vec::new(); + let mut state = TokState::new(text, config); + while let Some(tok_type) = state.next() { + result.push(( + tok_type?, + state.text_pos.slice_from_start_pos(&state.start_pos), + )); + } + Ok(result) +} + +fn tokenize_all<'t>( + text: &'t str, + config: &TokConfig, +) -> Result, TokError<'t>> { + let mut result = tokenize_with_end_marker(text, config)?; + // Remove the EndMarker, since it's on every non-error token stream. 
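+ // (The EOF-related tests below use tokenize_with_end_marker directly so they can
+ // assert on the EndMarker and the synthesized trailing Newline.)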
+ assert_eq!(result.pop().expect("EndMarker").0, TokType::EndMarker); + // Also remove fake newline at the end + if let Some((TokType::Newline, "")) = result.last() { + result.pop(); + } + Ok(result) +} + +#[test] +fn test_indentifier() { + assert_eq!( + tokenize_all("test input", &default_config()), + Ok(vec![(TokType::Name, "test"), (TokType::Name, "input")]) + ); + + assert_eq!( + tokenize_all("__with_underscores", &default_config()), + Ok(vec![(TokType::Name, "__with_underscores")]) + ); + + assert_eq!( + tokenize_all("{ends_with_op}", &default_config()), + Ok(vec![ + (TokType::Op, "{"), + (TokType::Name, "ends_with_op"), + (TokType::Op, "}") + ]) + ); + + assert_eq!( + tokenize_all("\u{0100}\u{0101}\u{0102}unicode", &default_config()), + Ok(vec![(TokType::Name, "\u{0100}\u{0101}\u{0102}unicode")]) + ); +} + +#[test] +fn test_async_await() { + // normally async/await are keywords + assert_eq!( + tokenize_all("async await", &default_config()), + Ok(vec![(TokType::Async, "async"), (TokType::Await, "await")]) + ); + + // with async_hacks, async/await are handled as identifiers by default + assert_eq!( + tokenize_all( + "async await", + &TokConfig { + async_hacks: true, + ..default_config() + } + ), + Ok(vec![(TokType::Name, "async"), (TokType::Name, "await")]) + ); + + // with async_hacks, async/await are handled as keywords in functions + assert_eq!( + tokenize_all( + "async def fn():\n await foo\nawait bar", + &TokConfig { + async_hacks: true, + ..default_config() + } + ), + Ok(vec![ + // this async is followed by a def, so it's converted to an Async + (TokType::Async, "async"), + (TokType::Name, "def"), + (TokType::Name, "fn"), + (TokType::Op, "("), + (TokType::Op, ")"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + // this await is inside a function, and is converted into an Await + (TokType::Await, "await"), + (TokType::Name, "foo"), + (TokType::Newline, "\n"), + (TokType::Dedent, ""), + // this await is outside the function, and is turned into an identifier + (TokType::Name, "await"), + (TokType::Name, "bar") + ]) + ); +} + +#[test] +fn test_blankline() { + assert_eq!( + tokenize_all("\n \n\t\n\x0c\n\n", &default_config()), + Ok(vec![]) + ); +} + +#[test] +fn test_newline() { + assert_eq!( + tokenize_all("a\nb\rc\r\n", &default_config()), + Ok(vec![ + (TokType::Name, "a"), + (TokType::Newline, "\n"), + (TokType::Name, "b"), + (TokType::Newline, "\r"), + (TokType::Name, "c"), + (TokType::Newline, "\r\n") + ]) + ); +} + +#[test] +fn test_indent_dedent() { + assert_eq!( + tokenize_all("one\n two\n sameindent\n", &default_config()), + Ok(vec![ + (TokType::Name, "one"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "two"), + (TokType::Newline, "\n"), + (TokType::Name, "sameindent"), + (TokType::Newline, "\n"), + (TokType::Dedent, "") + ]) + ); + + assert_eq!( + tokenize_all("one\n two\n \tthree\n", &default_config()), + Ok(vec![ + (TokType::Name, "one"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "two"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "three"), + (TokType::Newline, "\n"), + (TokType::Dedent, ""), + (TokType::Dedent, "") + ]) + ); + + // indentation decreases to a new (smaller) indentation level that wasn't on the stack + assert_eq!( + tokenize_all(" one\n two", &default_config()), + Err(TokError::Dedent), + ); + + // TabSpace error without change in indentation + assert_eq!( + tokenize_all(" one\n\ttwo\n", &default_config()), + Err(TokError::TabSpace), + ); + + // 
TabSpace error with increase in indentation + assert_eq!( + tokenize_all(" one\n\t\ttwo\n", &default_config()), + Err(TokError::TabSpace), + ); + + // TabSpace error with decrease in indentation + assert_eq!( + tokenize_all(" one\n \ttwo\n\tthree\n", &default_config()), + Err(TokError::TabSpace), + ); + + // this looks like a TabSpace error, but CPython allows it, so we should too + assert!(tokenize_all(" \tone\n\t two\n", &default_config()).is_ok()); +} + +#[test] +fn test_integer_decimal() { + assert_eq!( + tokenize_all("123456789", &default_config()), + Ok(vec![(TokType::Number, "123456789")]) + ); + + assert_eq!( + tokenize_all("1_2_3", &default_config()), + Ok(vec![(TokType::Number, "1_2_3")]) + ); + + // doesn't consume trailing underscores + assert_eq!( + tokenize_all("123_", &default_config()), + Err(TokError::BadDecimal), + ); +} + +#[test] +fn test_integer_leading_zeros() { + assert_eq!( + tokenize_all("000", &default_config()), + Ok(vec![(TokType::Number, "000")]) + ); + + assert_eq!( + tokenize_all("0_0_0", &default_config()), + Ok(vec![(TokType::Number, "0_0_0")]) + ); + + assert_eq!( + tokenize_all("00123", &default_config()), + Err(TokError::BadDecimalLeadingZeros) + ); +} + +#[test] +fn test_integer_hexadecimal() { + assert_eq!( + tokenize_all("0x00Aa12Ff", &default_config()), + Ok(vec![(TokType::Number, "0x00Aa12Ff")]), + ); + + assert_eq!( + tokenize_all("0x_1_2_3", &default_config()), + Ok(vec![(TokType::Number, "0x_1_2_3")]), + ); + + assert_eq!( + tokenize_all("0x123_", &default_config()), + Err(TokError::BadHexadecimal), + ); +} + +#[test] +fn test_integer_octal() { + assert_eq!( + tokenize_all("0o001234567", &default_config()), + Ok(vec![(TokType::Number, "0o001234567")]), + ); + + assert_eq!( + tokenize_all("0o_1_2_3", &default_config()), + Ok(vec![(TokType::Number, "0o_1_2_3")]), + ); + + assert_eq!( + tokenize_all("0o123_", &default_config()), + Err(TokError::BadOctal), + ); + + assert_eq!( + tokenize_all("0o789", &default_config()), + Err(TokError::BadOctalDigit('8')), + ); +} + +#[test] +fn test_integer_binary() { + assert_eq!( + tokenize_all("0b00101011", &default_config()), + Ok(vec![(TokType::Number, "0b00101011")]), + ); + + assert_eq!( + tokenize_all("0b_0_1_0_1", &default_config()), + Ok(vec![(TokType::Number, "0b_0_1_0_1")]), + ); + + assert_eq!( + tokenize_all("0b0101_", &default_config()), + Err(TokError::BadBinary), + ); + + assert_eq!( + tokenize_all("0b0123", &default_config()), + Err(TokError::BadBinaryDigit('2')), + ); +} + +#[test] +fn test_fraction() { + // fraction starting with a dot + assert_eq!( + tokenize_all(".5", &default_config()), + Ok(vec![(TokType::Number, ".5")]) + ); + + // fraction starting with a dot using E + assert_eq!( + tokenize_all(".5e9", &default_config()), + Ok(vec![(TokType::Number, ".5e9")]) + ); + + // fraction starting with a dot using J + assert_eq!( + tokenize_all(".5j", &default_config()), + Ok(vec![(TokType::Number, ".5j")]) + ); + + // fraction starting with a zero + assert_eq!( + tokenize_all("0.5", &default_config()), + Ok(vec![(TokType::Number, "0.5")]) + ); + + // fraction starting with a zero using E + assert_eq!( + tokenize_all("0.5e9", &default_config()), + Ok(vec![(TokType::Number, "0.5e9")]) + ); + + // fraction starting with a zero using J + assert_eq!( + tokenize_all("0.5j", &default_config()), + Ok(vec![(TokType::Number, "0.5j")]) + ); + + // fraction with underscores + assert_eq!( + tokenize_all("1_0.2_5", &default_config()), + Ok(vec![(TokType::Number, "1_0.2_5")]) + ); + + // underscores after the 
fraction are an error + assert_eq!( + tokenize_all(".5_", &default_config()), + Err(TokError::BadDecimal), + ); + + // doesn't consume underscores around the dot + assert_eq!( + tokenize_all("1_.25", &default_config()), + Err(TokError::BadDecimal), + ); + + // doesn't consume underscores around the dot + assert_eq!( + tokenize_all("1._25", &default_config()), + Ok(vec![(TokType::Number, "1."), (TokType::Name, "_25")]) + ); +} + +#[test] +fn test_string() { + // empty, single quote + assert_eq!( + tokenize_all("''", &default_config()), + Ok(vec![(TokType::String, "''")]), + ); + + // empty, double quote + assert_eq!( + tokenize_all(r#""""#, &default_config()), + Ok(vec![(TokType::String, r#""""#)]), + ); + + // simple string + assert_eq!( + tokenize_all("'test'", &default_config()), + Ok(vec![(TokType::String, "'test'")]), + ); + + // mixed quotes + assert_eq!( + tokenize_all(r#""test'"#, &default_config()), + Err(TokError::UnterminatedString), + ); + + // single quoted strings can contain double quotes, double quoted strings can contain single + // quotes + assert_eq!( + tokenize_all( + r#"'she said "hey"' "but he'd ignored her""#, + &default_config() + ), + Ok(vec![ + (TokType::String, r#"'she said "hey"'"#), + (TokType::String, r#""but he'd ignored her""#) + ]), + ); + + // escape characters + assert_eq!( + tokenize_all("'a\\b\\c\\d\\e\\'\\f\\g'", &default_config()), + Ok(vec![(TokType::String, "'a\\b\\c\\d\\e\\'\\f\\g'"),]), + ); + + // newline in the middle of a string causes an unterminated string + assert_eq!( + tokenize_all("'first\nsecond'", &default_config()), + Err(TokError::UnterminatedString), + ); + + // newlines can be escaped and are preserved in the output + assert_eq!( + tokenize_all("'first\\\nsecond\\\r\nthird\\\r'", &default_config()), + Ok(vec![(TokType::String, "'first\\\nsecond\\\r\nthird\\\r'"),]), + ); +} + +#[test] +fn test_string_triple_quoted() { + // empty, single quote + assert_eq!( + tokenize_all("''''''", &default_config()), + Ok(vec![(TokType::String, "''''''")]), + ); + + // empty, double quote + assert_eq!( + tokenize_all(r#""""""""#, &default_config()), + Ok(vec![(TokType::String, r#""""""""#)]), + ); + + // simple string with newlines + assert_eq!( + tokenize_all("'''\nmulti\rline\r\n'''", &default_config()), + Ok(vec![(TokType::String, "'''\nmulti\rline\r\n'''")]), + ); + + // unterminated string + assert_eq!( + tokenize_all( + "'''hey'there's''quotes'here, but not '' three'", + &default_config() + ), + Err(TokError::UnterminatedTripleQuotedString), + ); +} + +#[test] +fn test_string_prefix() { + // works with double-quoted string + assert_eq!( + tokenize_all(r#"b"""#, &default_config()), + Ok(vec![(TokType::String, r#"b"""#)]), + ); + + // works with triple-quoted string + assert_eq!( + tokenize_all("b'''test'''", &default_config()), + Ok(vec![(TokType::String, "b'''test'''")]), + ); + + // prefix can be capitalized + assert_eq!( + tokenize_all("B'' R'' U'' F''", &default_config()), + Ok(vec![ + (TokType::String, "B''"), + (TokType::String, "R''"), + (TokType::String, "U''"), + (TokType::String, "F''"), + ]), + ); + + // valid prefixes + assert_eq!( + tokenize_all("b'' r'' u'' f'' br'' fr'' rb'' rf''", &default_config()), + Ok(vec![ + (TokType::String, "b''"), + (TokType::String, "r''"), + (TokType::String, "u''"), + (TokType::String, "f''"), + (TokType::String, "br''"), + (TokType::String, "fr''"), + (TokType::String, "rb''"), + (TokType::String, "rf''"), + ]), + ); + + // invalid prefixes + assert_eq!( + tokenize_all("bb'' rr'' uu'' ff'' ur'' ub'' 
uf'' fb''", &default_config()), + Ok(vec![ + (TokType::Name, "bb"), + (TokType::String, "''"), + (TokType::Name, "rr"), + (TokType::String, "''"), + (TokType::Name, "uu"), + (TokType::String, "''"), + (TokType::Name, "ff"), + (TokType::String, "''"), + (TokType::Name, "ur"), + (TokType::String, "''"), + (TokType::Name, "ub"), + (TokType::String, "''"), + (TokType::Name, "uf"), + (TokType::String, "''"), + (TokType::Name, "fb"), + (TokType::String, "''"), + ]), + ); +} + +#[test] +fn test_split_fstring() { + let config = TokConfig { + split_fstring: true, + ..default_config() + }; + + assert_eq!( + tokenize_all("f''", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::FStringEnd, "'"), + ]), + ); + + assert_eq!( + tokenize_all("f'{value}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::Op, "{"), + (TokType::Name, "value"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'"), + ]), + ); + + assert_eq!( + tokenize_all("f'{{just a string}}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::FStringString, r"{{just a string}}"), + (TokType::FStringEnd, "'"), + ]), + ); + + assert_eq!( + tokenize_all(r"f'\N{Latin Small Letter A}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::FStringString, r"\N{Latin Small Letter A}"), + (TokType::FStringEnd, "'"), + ]), + ); + + // format specifier + assert_eq!( + tokenize_all("f'result: {value:{width}.{precision}}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::FStringString, "result: "), + (TokType::Op, "{"), + (TokType::Name, "value"), + (TokType::Op, ":"), + (TokType::Op, "{"), + (TokType::Name, "width"), + (TokType::Op, "}"), + (TokType::FStringString, "."), + (TokType::Op, "{"), + (TokType::Name, "precision"), + (TokType::Op, "}"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'"), + ]), + ); + + // the walrus operator isn't valid unless parenthesized + assert_eq!( + tokenize_all("f'{a := b}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::Op, "{"), + (TokType::Name, "a"), + (TokType::Op, ":"), + (TokType::FStringString, "= b"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'"), + ]), + ); + + // once parenthesized, this is recognized as the walrus operator + assert_eq!( + tokenize_all("f'{(a := b)}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::Op, "{"), + (TokType::Op, "("), + (TokType::Name, "a"), + (TokType::Op, ":="), + (TokType::Name, "b"), + (TokType::Op, ")"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'"), + ]), + ); +} + +#[test] +fn test_operator() { + assert_eq!( + tokenize_all("= == * ** **= -> . .. 
...", &default_config()), + Ok(vec![ + (TokType::Op, "="), + (TokType::Op, "=="), + (TokType::Op, "*"), + (TokType::Op, "**"), + (TokType::Op, "**="), + (TokType::Op, "->"), + (TokType::Op, "."), + (TokType::Op, "."), + (TokType::Op, "."), + (TokType::Op, "...") + ]), + ); +} + +#[test] +fn test_fake_newline() { + assert_eq!( + tokenize_with_end_marker("foo", &default_config()), + Ok(vec![ + (TokType::Name, "foo"), + (TokType::Newline, ""), + (TokType::EndMarker, "") + ]) + ); +} + +#[test] +fn test_no_fake_newline_for_empty_input() { + assert_eq!( + tokenize_with_end_marker("", &default_config()), + Ok(vec![(TokType::EndMarker, "")]) + ); +} + +#[test] +fn test_no_fake_newline_for_only_whitespaces() { + assert_eq!( + tokenize_with_end_marker(" ", &default_config()), + Ok(vec![(TokType::EndMarker, "")]) + ); +} + +#[test] +fn test_add_dedents_after_fake_newline() { + assert_eq!( + tokenize_with_end_marker("if 1:\n if 2:\n foo", &default_config()), + Ok(vec![ + (TokType::Name, "if"), + (TokType::Number, "1"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "if"), + (TokType::Number, "2"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "foo"), + (TokType::Newline, ""), + (TokType::Dedent, ""), + (TokType::Dedent, ""), + (TokType::EndMarker, "") + ]) + ); +} diff --git a/native/libcst/src/tokenizer/text_position/char_width.rs b/native/libcst/src/tokenizer/text_position/char_width.rs new file mode 100644 index 00000000..84f36405 --- /dev/null +++ b/native/libcst/src/tokenizer/text_position/char_width.rs @@ -0,0 +1,329 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use std::str::Chars; + +#[derive(Debug, Eq, PartialEq)] +pub struct CharWidth { + pub byte_width: usize, + pub char_width: usize, + pub character: char, +} + +/// Iterates over characters (unicode codepoints) normalizing `'\r'` and `"\r\n"` to `'\n'`. Also +/// gives the width of each character, but `'\r\n'` is counted as 2 bytes and 2 characters instead +/// of one even after being normalized to '\n'. +#[derive(Clone)] +pub struct NewlineNormalizedCharWidths<'t> { + iter: Chars<'t>, + text: &'t str, + idx: usize, +} + +impl<'t> NewlineNormalizedCharWidths<'t> { + pub fn new(text: &'t str) -> Self { + Self { + text, + iter: text.chars(), + idx: 0, + } + } + + pub fn previous(&mut self) -> Option<::Item> { + // This function is called infrequently. 
+ let mut back_iter = self.text[..self.idx].chars(); + let result = match back_iter.next_back() { + // Unlikely: \n, normalization *may* be needed + Some('\n') => { + // Peek at the previous character to see we're a `\r\n` sequence + match back_iter.next_back() { + Some('\r') => Some(CharWidth { + byte_width: '\r'.len_utf8() + '\n'.len_utf8(), + char_width: 2, + character: '\n', + }), + _ => Some(CharWidth { + byte_width: '\n'.len_utf8(), + char_width: 1, + character: '\n', + }), + } + } + // Unlikely: \r, normalization is needed + Some('\r') => Some(CharWidth { + byte_width: '\n'.len_utf8(), + char_width: 1, + character: '\n', + }), + // Common case: Not \r or \n, so no normalization is needed + Some(ch) => Some(CharWidth { + byte_width: ch.len_utf8(), + char_width: 1, + character: ch, + }), + // Unlikely: EOF + None => None, + }; + if let Some(r) = &result { + self.idx -= r.byte_width; + self.iter = self.text[self.idx..].chars(); + } + result + } + + pub fn peek_character(&self) -> Option { + // This function is called very frequently. + // + // We're not using peekable or caching here, since this should be cheap enough on it's own, + // though benchmarking might prove otherwise. + match self.iter.clone().next() { + Some('\r') => Some('\n'), + ch => ch, + } + } +} + +impl<'t> Iterator for NewlineNormalizedCharWidths<'t> { + type Item = CharWidth; + + fn next(&mut self) -> Option { + // This function is called very frequently. + let result = match self.iter.next() { + // Unlikely: \r, normalization is needed + Some('\r') => { + // Peek at the next character to see if it's '\n'. + let mut speculative = self.iter.clone(); + match speculative.next() { + Some('\n') => { + self.iter = speculative; + Some(CharWidth { + byte_width: '\r'.len_utf8() + '\n'.len_utf8(), + char_width: 2, + character: '\n', + }) + } + _ => Some(CharWidth { + byte_width: '\r'.len_utf8(), + char_width: 1, + character: '\n', + }), + } + } + // Common case: Not \r, so no normalization is needed + Some(ch) => Some(CharWidth { + byte_width: ch.len_utf8(), + char_width: 1, + character: ch, + }), + // Unlikely: EOF + None => None, + }; + if let Some(r) = &result { + self.idx += r.byte_width; + } + result + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ascii_no_newlines() { + let mut cw = NewlineNormalizedCharWidths::new("in"); + + // go forward + assert_eq!(cw.peek_character(), Some('i')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 'i' + }) + ); + assert_eq!(cw.peek_character(), Some('n')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 'n' + }) + ); + + // end of text + assert_eq!(cw.peek_character(), None); + assert_eq!(cw.next(), None); + + // go backwards + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 'n' + }) + ); + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 'i' + }) + ); + + // beginning of text + assert_eq!(cw.previous(), None); + + // try going foward again + assert_eq!(cw.peek_character(), Some('i')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 'i' + }) + ); + } + + #[test] + fn test_unicode_no_newlines() { + // "test" with an accented 'e' + let mut cw = NewlineNormalizedCharWidths::new("t\u{00e9}st"); + + // go forward + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 't' + }) + ); + 
assert_eq!(cw.peek_character(), Some('\u{00e9}')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 2, + char_width: 1, + character: '\u{00e9}' + }) + ); + assert_eq!(cw.peek_character(), Some('s')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 's' + }) + ); + + // go backwards + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 's' + }) + ); + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 2, + char_width: 1, + character: '\u{00e9}' + }) + ); + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: 't' + }) + ); + } + + #[test] + fn test_newlines() { + let mut cw = NewlineNormalizedCharWidths::new("\n\r\r\n"); + + // go forward + assert_eq!(cw.peek_character(), Some('\n')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: '\n' + }) + ); + assert_eq!(cw.peek_character(), Some('\n')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: '\n' + }) + ); + assert_eq!(cw.peek_character(), Some('\n')); + assert_eq!( + cw.next(), + Some(CharWidth { + byte_width: 2, + char_width: 2, + character: '\n' + }) + ); + + // end of text + assert_eq!(cw.peek_character(), None); + assert_eq!(cw.next(), None); + + // go backwards + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 2, + char_width: 2, + character: '\n' + }) + ); + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: '\n' + }) + ); + assert_eq!( + cw.previous(), + Some(CharWidth { + byte_width: 1, + char_width: 1, + character: '\n' + }) + ); + + // beginning of text + assert_eq!(cw.previous(), None); + } + + #[test] + fn test_empty() { + let mut cw = NewlineNormalizedCharWidths::new(""); + assert_eq!(cw.peek_character(), None); + assert_eq!(cw.next(), None); + assert_eq!(cw.previous(), None); + } +} diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs new file mode 100644 index 00000000..b6905484 --- /dev/null +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -0,0 +1,353 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +mod char_width; + +use once_cell::sync::Lazy; +use regex::Regex; +use std::fmt; + +use crate::tokenizer::debug_utils::EllipsisDebug; +use char_width::NewlineNormalizedCharWidths; + +static CR_OR_LF_RE: Lazy = Lazy::new(|| Regex::new(r"[\r\n]").expect("regex")); + +pub trait TextPattern { + fn match_len(&self, text: &str) -> Option; +} + +impl TextPattern for &Regex { + // make sure to anchor your regex with \A + fn match_len(&self, text: &str) -> Option { + self.find(text).map(|m| m.end()) + } +} + +impl TextPattern for &str { + // make sure to anchor your regex with \A + fn match_len(&self, text: &str) -> Option { + if text.starts_with(self) { + Some(self.len()) + } else { + None + } + } +} + +// This is Clone, since that's needed to support async_hacks, but you probably don't usually want to +// clone. Use TextPositionSnapshot instead. 
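+// (The async_hacks lookahead in consume_identifier_or_prefixed_string clones the whole
+// TokState, and this type along with it, to peek at the token following `async`.)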
+#[derive(Clone)] +pub struct TextPosition<'t> { + text: &'t str, + char_widths: NewlineNormalizedCharWidths<'t>, + inner_byte_idx: usize, + inner_char_column_number: usize, + inner_byte_column_number: usize, + inner_line_number: usize, +} + +/// A lightweight immutable version of TextPosition that's slightly +/// cheaper to construct/store. Used for storing the start position of tokens. +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct TextPositionSnapshot { + pub inner_byte_idx: usize, + pub inner_char_column_number: usize, + pub inner_line_number: usize, +} + +impl TextPositionSnapshot { + pub fn byte_idx(&self) -> usize { + self.inner_byte_idx + } + + pub fn char_column_number(&self) -> usize { + self.inner_char_column_number + } + + pub fn line_number(&self) -> usize { + self.inner_line_number + } +} + +impl<'t> TextPosition<'t> { + pub fn new(text: &'t str) -> Self { + Self { + text, + char_widths: NewlineNormalizedCharWidths::new(text), + inner_byte_idx: 0, + inner_char_column_number: 0, + inner_byte_column_number: 0, + inner_line_number: 1, + } + } + + /// Peeks at the next character. Similar to `std::iter::Peekable`, but doesn't modify our + /// internal position counters like wrapping this in `Peekable` would. + pub fn peek(&mut self) -> Option<::Item> { + self.char_widths.peek_character() + } + + /// Matches, but does not consume TextPattern. + /// + /// Caution: This does not normalize `'\r'` characters, like `peek()` and `next()` do. + pub fn matches(&self, pattern: P) -> bool { + let rest_of_text = &self.text[self.inner_byte_idx..]; + let match_len = pattern.match_len(rest_of_text); + match match_len { + Some(match_len) => { + assert!( + !CR_OR_LF_RE.is_match(&rest_of_text[..match_len]), + "matches pattern must not match a newline", + ); + true + } + None => false, + } + } + + /// Moves the iterator back one character. Panics if a newline is encountered or if we try to + /// back up past the beginning of the text. + pub fn backup_no_newline(&mut self) { + if let Some(cw) = self.char_widths.previous() { + // If we tried to back up across a newline, we'd have to recompute char_column_number, + // which would be expensive, so it's unsupported. + self.inner_char_column_number = self + .inner_char_column_number + .checked_sub(1) + .expect("cannot back up past the beginning of a line."); + self.inner_byte_idx -= cw.byte_width; + } else { + panic!("Tried to backup past the beginning of the text.") + } + } + + /// Tries to consume the given TextPattern, moving the TextPosition forward. Returns false if no + /// match was found. Does not support newlines. + /// + /// Panics if a newline is consumed as part of the pattern. + pub fn consume(&mut self, pattern: P) -> bool { + let rest_of_text = &self.text[self.inner_byte_idx..]; + if let Some(len) = pattern.match_len(rest_of_text) { + let new_byte_idx = self.inner_byte_idx + len; + // Call next() a bunch of times to advance the character counters. There's no way to + // shortcut this because we don't know how many characters are in a slice of bytes, + // though we could use a faster algorithm that inspects multiple characters at once + // (e.g. SIMD). + while self.inner_byte_idx < new_byte_idx { + // We can't support newline normalization in this API without copying the string, so + // rather than exposing that (potentially dangerous) behavior, panic if it happens. 
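+ // For ASCII patterns like the ellipsis `"..."` this loop simply advances
+ // byte_idx and char_column_number by three each.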
+ assert!( + self.next() != Some('\n'), + "consume pattern must not match a newline", + ); + } + // this shouldn't be possible for the provided implementations of TextPattern + debug_assert!( + self.inner_byte_idx == new_byte_idx, + "pattern ended on a non-character boundary", + ); + true + } else { + false + } + } + + pub fn text(&self) -> &'t str { + self.text + } + + pub fn slice_from_start_pos(&self, start_pos: &TextPositionSnapshot) -> &'t str { + &self.text[start_pos.byte_idx()..self.byte_idx()] + } + + /// Returns the number of bytes we've traversed. This is useful for Rust code that needs to + /// slice the input source code, since Rust slices operate on bytes and not unicode codepoints. + pub fn byte_idx(&self) -> usize { + self.inner_byte_idx + } + + /// Returns the column number in terms of number of characters (unicode codepoints) past the + /// beginning of the line. Zero-indexed. + pub fn char_column_number(&self) -> usize { + self.inner_char_column_number + } + + pub fn byte_column_number(&self) -> usize { + self.inner_byte_column_number + } + + /// Returns the one-indexed line number. + pub fn line_number(&self) -> usize { + self.inner_line_number + } +} + +impl Iterator for TextPosition<'_> { + type Item = char; + + /// Gets the next character. This has the side-effect of advancing the internal position + /// counters. + fn next(&mut self) -> Option { + if let Some(cw) = self.char_widths.next() { + self.inner_byte_idx += cw.byte_width; + match cw.character { + '\n' => { + self.inner_line_number += 1; + self.inner_char_column_number = 0; + self.inner_byte_column_number = 0; + } + _ => { + self.inner_char_column_number += cw.char_width; + self.inner_byte_column_number += cw.byte_width; + } + } + Some(cw.character) + } else { + None + } + } +} + +impl fmt::Debug for TextPosition<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TextPosition") + .field("text", &EllipsisDebug) + .field("char_widths", &EllipsisDebug) + .field("inner_byte_idx", &self.inner_byte_idx) + .field("inner_char_column_number", &self.inner_char_column_number) + .field("inner_line_number", &self.inner_line_number) + .finish() + } +} + +impl From<&TextPosition<'_>> for TextPositionSnapshot { + fn from(tp: &TextPosition) -> Self { + Self { + inner_byte_idx: tp.inner_byte_idx, + inner_char_column_number: tp.inner_char_column_number, + inner_line_number: tp.inner_line_number, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_empty() { + let mut pos = TextPosition::new(""); + assert_eq!(pos.byte_idx(), 0); + assert_eq!(pos.char_column_number(), 0); + assert_eq!(pos.line_number(), 1); + assert_eq!(pos.peek(), None); + assert!(!pos.consume(&Regex::new(r"\Awon't match").unwrap())); + assert!(pos.consume(&Regex::new(r"\A").unwrap())); + assert_eq!(pos.next(), None); + // call next() again to verify that it's fused + assert_eq!(pos.next(), None); + } + + #[test] + fn test_ascii() { + let mut pos = TextPosition::new("abcdefg"); + + assert_eq!(pos.peek(), Some('a')); + assert_eq!(pos.next(), Some('a')); + assert_eq!(pos.byte_idx(), 1); + assert_eq!(pos.char_column_number(), 1); + assert_eq!(pos.line_number(), 1); + + // consume a few characters with a regex + assert!(!pos.consume(&Regex::new(r"\Awon't match").unwrap())); + assert!(pos.consume(&Regex::new(r"\Abcd").unwrap())); + assert_eq!(pos.byte_idx(), 4); + assert_eq!(pos.char_column_number(), 4); + assert_eq!(pos.line_number(), 1); + + // consume the rest of the text + assert_eq!(pos.next(), 
Some('e')); + assert_eq!(pos.next(), Some('f')); + assert_eq!(pos.next(), Some('g')); + assert_eq!(pos.next(), None); + assert_eq!(pos.byte_idx(), 7); + assert_eq!(pos.char_column_number(), 7); + assert_eq!(pos.line_number(), 1); + } + + #[test] + fn test_unicode() { + let mut pos = TextPosition::new("\u{00e9}abc"); + + assert_eq!(pos.peek(), Some('\u{00e9}')); + assert_eq!(pos.next(), Some('\u{00e9}')); + } + + #[test] + fn test_newline_lf() { + let mut pos = TextPosition::new("ab\nde"); + + assert_eq!(pos.next(), Some('a')); + assert_eq!(pos.next(), Some('b')); + assert_eq!(pos.line_number(), 1); + assert_eq!(pos.char_column_number(), 2); + + assert_eq!(pos.next(), Some('\n')); + assert_eq!(pos.line_number(), 2); + assert_eq!(pos.char_column_number(), 0); + + assert_eq!(pos.next(), Some('d')); + assert_eq!(pos.next(), Some('e')); + assert_eq!(pos.next(), None); + assert_eq!(pos.line_number(), 2); + assert_eq!(pos.char_column_number(), 2); + + assert_eq!(pos.byte_idx(), 5); + } + + #[test] + fn test_newline_cr() { + let mut pos = TextPosition::new("ab\rde"); + + assert_eq!(pos.next(), Some('a')); + assert_eq!(pos.next(), Some('b')); + assert_eq!(pos.line_number(), 1); + assert_eq!(pos.char_column_number(), 2); + + assert_eq!(pos.next(), Some('\n')); + assert_eq!(pos.line_number(), 2); + assert_eq!(pos.char_column_number(), 0); + + assert_eq!(pos.next(), Some('d')); + assert_eq!(pos.next(), Some('e')); + assert_eq!(pos.next(), None); + assert_eq!(pos.line_number(), 2); + assert_eq!(pos.char_column_number(), 2); + + assert_eq!(pos.byte_idx(), 5); + } + + #[test] + fn test_newline_cr_lf() { + let mut pos = TextPosition::new("ab\r\nde"); + + assert_eq!(pos.next(), Some('a')); + assert_eq!(pos.next(), Some('b')); + assert_eq!(pos.line_number(), 1); + assert_eq!(pos.char_column_number(), 2); + + assert_eq!(pos.next(), Some('\n')); + assert_eq!(pos.line_number(), 2); + assert_eq!(pos.char_column_number(), 0); + + assert_eq!(pos.next(), Some('d')); + assert_eq!(pos.next(), Some('e')); + assert_eq!(pos.next(), None); + assert_eq!(pos.line_number(), 2); + assert_eq!(pos.char_column_number(), 2); + + assert_eq!(pos.byte_idx(), 6); + } +} diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs new file mode 100644 index 00000000..e624bc3d --- /dev/null +++ b/native/libcst/src/tokenizer/whitespace_parser.rs @@ -0,0 +1,401 @@ +use crate::nodes::{ + Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, + SimpleWhitespace, TrailingWhitespace, +}; +use once_cell::sync::Lazy; +use regex::Regex; +use thiserror::Error; + +use crate::Token; + +use super::TokType; + +static SIMPLE_WHITESPACE_RE: Lazy = + Lazy::new(|| Regex::new(r"\A([ \f\t]|\\(\r\n?|\n))*").expect("regex")); +static NEWLINE_RE: Lazy = Lazy::new(|| Regex::new(r"\A(\r\n?|\n)").expect("regex")); +static COMMENT_RE: Lazy = Lazy::new(|| Regex::new(r"\A#[^\r\n]*").expect("regex")); + +#[allow(clippy::upper_case_acronyms, clippy::enum_variant_names)] +#[derive(Error, Debug, PartialEq, Eq)] +pub enum WhitespaceError { + #[error("WTF")] + WTF, + #[error("Internal error while parsing whitespace: {0}")] + InternalError(String), + #[error("Failed to parse mandatory trailing whitespace")] + TrailingWhitespaceError, +} + +type Result = std::result::Result; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct State<'a> { + pub line: usize, // one-indexed (to match parso's behavior) + pub column: usize, // zero-indexed (to match parso's behavior) + pub column_byte: 
usize, + pub absolute_indent: &'a str, + pub is_parenthesized: bool, + pub byte_offset: usize, +} + +impl<'a> Default for State<'a> { + fn default() -> Self { + Self { + line: 1, + column: 0, + column_byte: 0, + absolute_indent: "", + is_parenthesized: false, + byte_offset: 0, + } + } +} + +// TODO +pub struct Config<'a> { + pub input: &'a str, + pub lines: Vec<&'a str>, + pub default_newline: &'a str, + pub default_indent: &'a str, +} + +impl<'a> Config<'a> { + pub fn new(input: &'a str, tokens: &[Token<'a>]) -> Self { + let mut default_indent = " "; + for tok in tokens { + if tok.r#type == TokType::Indent { + default_indent = tok.relative_indent.unwrap(); + break; + } + } + let default_newline = Regex::new(r"\r\n?|\n") + .expect("regex") + .find(input) + .map(|m| m.as_str()) + .unwrap_or("\n"); + + Self { + input, + lines: input.split_inclusive(default_newline).collect(), + default_newline, + default_indent, + } + } + + pub fn has_trailing_newline(&self) -> bool { + self.input.ends_with('\n') + && !self.input.ends_with("\\\n") + && !self.input.ends_with("\\\r\n") + } + + fn get_line(&self, line_number: usize) -> Result<&'a str> { + let err_fn = || { + WhitespaceError::InternalError(format!( + "tried to get line {} which is out of range", + line_number + )) + }; + self.lines + .get(line_number.checked_sub(1).ok_or_else(err_fn)?) + .map(|l| &l[..]) + .ok_or_else(err_fn) + } + + fn get_line_after_column(&self, line_number: usize, column_index: usize) -> Result<&'a str> { + self.get_line(line_number)? + .get(column_index..) + .ok_or_else(|| { + WhitespaceError::InternalError(format!( + "Column index {} out of range for line {}", + column_index, line_number + )) + }) + } +} + +#[derive(Debug)] +enum ParsedEmptyLine<'a> { + NoIndent, + Line(EmptyLine<'a>), +} + +fn parse_empty_line<'a>( + config: &Config<'a>, + state: &mut State, + override_absolute_indent: Option<&'a str>, +) -> Result> { + let mut speculative_state = state.clone(); + if let Ok(indent) = parse_indent(config, &mut speculative_state, override_absolute_indent) { + let whitespace = parse_simple_whitespace(config, &mut speculative_state)?; + let comment = parse_comment(config, &mut speculative_state)?; + if let Some(newline) = parse_newline(config, &mut speculative_state)? { + *state = speculative_state; + return Ok(ParsedEmptyLine::Line(EmptyLine { + indent, + whitespace, + comment, + newline, + })); + } + } + Ok(ParsedEmptyLine::NoIndent) +} + +fn _parse_empty_lines<'a>( + config: &Config<'a>, + state: &mut State<'a>, + override_absolute_indent: Option<&'a str>, +) -> Result, EmptyLine<'a>)>> { + let mut lines = vec![]; + loop { + let last_state = state.clone(); + let parsed_line = parse_empty_line(config, state, override_absolute_indent)?; + if *state == last_state { + break; + } + match parsed_line { + ParsedEmptyLine::NoIndent => break, + ParsedEmptyLine::Line(l) => lines.push((state.clone(), l)), + } + } + Ok(lines) +} + +pub fn parse_empty_lines<'a>( + config: &Config<'a>, + state: &mut State<'a>, + override_absolute_indent: Option<&'a str>, +) -> Result>> { + // If override_absolute_indent is Some, then we need to parse all lines up to and including the + // last line that is indented at our level. These all belong to the footer and not to the next + // line's leading_lines. 
+ // + // We don't know what the last line with indent=True is, and there could be indent=False lines + // interspersed with indent=True lines, so we need to speculatively parse all possible empty + // lines, and then unwind to find the last empty line with indent=True. + let mut speculative_state = state.clone(); + let mut lines = _parse_empty_lines(config, &mut speculative_state, override_absolute_indent)?; + + if override_absolute_indent.is_some() { + // Remove elements from the end until we find an indented line. + while let Some((_, empty_line)) = lines.last() { + if empty_line.indent { + break; + } + lines.pop(); + } + } + + if let Some((final_state, _)) = lines.last() { + // update the state to match the last line that we captured + *state = final_state.clone(); + } + + Ok(lines.into_iter().map(|(_, e)| e).collect()) +} + +pub fn parse_comment<'a>(config: &Config<'a>, state: &mut State) -> Result>> { + if let Some(comment_match) = + COMMENT_RE.find(config.get_line_after_column(state.line, state.column_byte)?) + { + let comment_str = comment_match.as_str(); + advance_this_line( + config, + state, + comment_str.chars().count(), + comment_str.len(), + )?; + return Ok(Some(Comment(comment_str))); + } + Ok(None) +} + +pub fn parse_newline<'a>(config: &Config<'a>, state: &mut State) -> Result>> { + if let Some(newline_match) = + NEWLINE_RE.find(config.get_line_after_column(state.line, state.column_byte)?) + { + let newline_str = newline_match.as_str(); + advance_this_line( + config, + state, + newline_str.chars().count(), + newline_str.len(), + )?; + if state.column_byte != config.get_line(state.line)?.len() { + return Err(WhitespaceError::InternalError(format!( + "Found newline at ({}, {}) but it's not EOL", + state.line, state.column + ))); + } + if state.line < config.lines.len() { + advance_to_next_line(config, state)?; + } + return Ok(Some(Newline( + if newline_str == config.default_newline { + None + } else { + Some(newline_str) + }, + Fakeness::Real, + ))); + } + + // If we're at the end of the file but not on BOL, that means this is the fake + // newline inserted by the tokenizer. + if state.byte_offset == config.input.len() && state.column_byte != 0 { + return Ok(Some(Newline(None, Fakeness::Fake))); + } + Ok(None) +} + +pub fn parse_optional_trailing_whitespace<'a>( + config: &Config<'a>, + state: &mut State, +) -> Result>> { + let mut speculative_state = state.clone(); + let whitespace = parse_simple_whitespace(config, &mut speculative_state)?; + let comment = parse_comment(config, &mut speculative_state)?; + if let Some(newline) = parse_newline(config, &mut speculative_state)? { + *state = speculative_state; + Ok(Some(TrailingWhitespace { + whitespace, + comment, + newline, + })) + } else { + Ok(None) + } +} + +pub fn parse_trailing_whitespace<'a>( + config: &Config<'a>, + state: &mut State, +) -> Result> { + match parse_optional_trailing_whitespace(config, state)? 
{ + Some(ws) => Ok(ws), + _ => Err(WhitespaceError::TrailingWhitespaceError), + } +} + +fn parse_indent<'a>( + config: &Config<'a>, + state: &mut State, + override_absolute_indent: Option<&'a str>, +) -> Result { + let absolute_indent = override_absolute_indent.unwrap_or(state.absolute_indent); + if state.column_byte != 0 { + if state.column_byte == config.get_line(state.line)?.len() + && state.line == config.lines.len() + { + Ok(false) + } else { + Err(WhitespaceError::InternalError( + "Column should not be 0 when parsing an index".to_string(), + )) + } + } else { + Ok( + if config + .get_line_after_column(state.line, state.column_byte)? + .starts_with(absolute_indent) + { + state.column_byte += absolute_indent.len(); + state.column += absolute_indent.chars().count(); + state.byte_offset += absolute_indent.len(); + true + } else { + false + }, + ) + } +} + +fn advance_to_next_line<'a>(config: &Config<'a>, state: &mut State) -> Result<()> { + let cur_line = config.get_line(state.line)?; + state.byte_offset += cur_line.len() - state.column_byte; + state.column = 0; + state.column_byte = 0; + state.line += 1; + Ok(()) +} + +fn advance_this_line<'a>( + config: &Config<'a>, + state: &mut State, + char_count: usize, + offset: usize, +) -> Result<()> { + let cur_line = config.get_line(state.line)?; + if cur_line.len() < state.column_byte + offset { + return Err(WhitespaceError::InternalError(format!( + "Tried to advance past line {}'s end", + state.line + ))); + } + state.column += char_count; + state.column_byte += offset; + state.byte_offset += offset; + Ok(()) +} + +pub fn parse_simple_whitespace<'a>( + config: &Config<'a>, + state: &mut State, +) -> Result> { + let capture_ws = |line, col| -> Result<&'a str> { + let x = config.get_line_after_column(line, col); + let x = x?; + Ok(SIMPLE_WHITESPACE_RE + .find(x) + .expect("SIMPLE_WHITESPACE_RE supports 0-length matches, so it must always match") + .as_str()) + }; + let start_offset = state.byte_offset; + let mut prev_line: &str; + loop { + prev_line = capture_ws(state.line, state.column_byte)?; + if !prev_line.contains('\\') { + break; + } + advance_to_next_line(config, state)?; + } + advance_this_line(config, state, prev_line.chars().count(), prev_line.len())?; + + Ok(SimpleWhitespace( + &config.input[start_offset..state.byte_offset], + )) +} + +pub fn parse_parenthesizable_whitespace<'a>( + config: &Config<'a>, + state: &mut State<'a>, +) -> Result> { + if state.is_parenthesized { + if let Some(ws) = parse_parenthesized_whitespace(config, state)? { + return Ok(ParenthesizableWhitespace::ParenthesizedWhitespace(ws)); + } + } + parse_simple_whitespace(config, state).map(ParenthesizableWhitespace::SimpleWhitespace) +} + +pub fn parse_parenthesized_whitespace<'a>( + config: &Config<'a>, + state: &mut State<'a>, +) -> Result>> { + if let Some(first_line) = parse_optional_trailing_whitespace(config, state)? { + let empty_lines = _parse_empty_lines(config, state, None)? + .into_iter() + .map(|(_, line)| line) + .collect(); + let indent = parse_indent(config, state, None)?; + let last_line = parse_simple_whitespace(config, state)?; + Ok(Some(ParenthesizedWhitespace { + first_line, + empty_lines, + indent, + last_line, + })) + } else { + Ok(None) + } +} diff --git a/native/libcst/tests/fixtures/class_craziness.py b/native/libcst/tests/fixtures/class_craziness.py new file mode 100644 index 00000000..67afc649 --- /dev/null +++ b/native/libcst/tests/fixtures/class_craziness.py @@ -0,0 +1,28 @@ +class Foo: ... + +class Bar : + ... 
+ +class Old ( ) : + gold : int + + +class OO ( Foo ) : ... + +class OOP ( Foo , Bar, ) : pass + +class OOPS ( + Foo , + +) : + pass + +class OOPSI ( Foo, * Bar , metaclass = + foo , +): pass + +class OOPSIE ( list , *args, kw = arg , ** kwargs ) : + what : does_this_even = mean + + def __init__(self) -> None: + self.foo: Bar = Bar() diff --git a/native/libcst/tests/fixtures/comments.py b/native/libcst/tests/fixtures/comments.py new file mode 100644 index 00000000..80830d21 --- /dev/null +++ b/native/libcst/tests/fixtures/comments.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 +# fmt: on +# Some license here. +# +# Has many lines. Many, many lines. +# Many, many, many lines. +"""Module docstring. + +Possibly also many, many lines. +""" + +import os.path +import sys + +import a +from b.c.d.e import X # some noqa comment + +try: + import fast +except ImportError: + import slow as fast + + +# Some comment before a function. +y = 1 +( + # some strings + y # type: ignore +) + + +def function(default=None): + """Docstring comes first. + + Possibly many lines. + """ + # FIXME: Some comment about why this function is crap but still in production. + import inner_imports + + if inner_imports.are_evil(): + # Explains why we have this if. + # In great detail indeed. + x = X() + return x.method1() # type: ignore + + + # This return is also commented for some reason. + return default + + +# Explains why we use global state. +GLOBAL_STATE = {"a": a(1), "b": a(2), "c": a(3)} + + +# Another comment! +# This time two lines. + + +class Foo: + """Docstring for class Foo. Example from Sphinx docs.""" + + #: Doc comment for class attribute Foo.bar. + #: It can have multiple lines. + bar = 1 + + flox = 1.5 #: Doc comment for Foo.flox. One line only. + + baz = 2 + """Docstring for class attribute Foo.baz.""" + + def __init__(self): + #: Doc comment for instance attribute qux. + self.qux = 3 + + self.spam = 4 + """Docstring for instance attribute spam.""" + + +#'
+
+#' This is pweave!
+
+ + +@fast(really=True) +async def wat(): + # This comment, for some reason \ + # contains a trailing backslash. + async with X.open_async() as x: # Some more comments + result = await x.method1() + # Comment after ending a block. + if result: + print("A OK", file=sys.stdout) + # Comment between things. + print() + + +if True: # Hanging comments + # because why not + pass + +# Some closing comments. +# Maybe Vim or Emacs directives for formatting. +# Who knows. diff --git a/native/libcst/tests/fixtures/comparisons.py b/native/libcst/tests/fixtures/comparisons.py new file mode 100644 index 00000000..126ea15e --- /dev/null +++ b/native/libcst/tests/fixtures/comparisons.py @@ -0,0 +1,21 @@ +if not 1: pass +if 1 and 1: pass +if 1 or 1: pass +if not not not 1: pass +if not 1 and 1 and 1: pass +if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass + +if 1: pass +#x = (1 == 1) +if 1 == 1: pass +if 1 != 1: pass +if 1 < 1: pass +if 1 > 1: pass +if 1 <= 1: pass +if 1 >= 1: pass +if x is x: pass +#if x is not x: pass +#if 1 in (): pass +#if 1 not in (): pass +if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 in x is x is x: pass +#if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in x is x is not x: pass diff --git a/native/libcst/tests/fixtures/decorated_function_without_body.py b/native/libcst/tests/fixtures/decorated_function_without_body.py new file mode 100644 index 00000000..d7c96e02 --- /dev/null +++ b/native/libcst/tests/fixtures/decorated_function_without_body.py @@ -0,0 +1,3 @@ +@hello +@bello +def f () : ... \ No newline at end of file diff --git a/native/libcst/tests/fixtures/dysfunctional_del.py b/native/libcst/tests/fixtures/dysfunctional_del.py new file mode 100644 index 00000000..a3fa4575 --- /dev/null +++ b/native/libcst/tests/fixtures/dysfunctional_del.py @@ -0,0 +1,14 @@ +# dysfunctional_del.py + +del a + +del a[1] + +del a.b.c +del ( a, b , c ) +del [ a, b , c ] + +del a , b, c + + +del a[1] , b [ 2] \ No newline at end of file diff --git a/native/libcst/tests/fixtures/expr.py b/native/libcst/tests/fixtures/expr.py new file mode 100644 index 00000000..c1c4e9b7 --- /dev/null +++ b/native/libcst/tests/fixtures/expr.py @@ -0,0 +1,375 @@ +... 
+"some_string" +b"\\xa3" +Name +None +True +False +1 +1.0 +1j +True or False +True or False or None +True and False +True and False and None +(Name1 and Name2) or Name3 +Name1 and Name2 or Name3 +Name1 or (Name2 and Name3) +Name1 or Name2 and Name3 +(Name1 and Name2) or (Name3 and Name4) +Name1 and Name2 or Name3 and Name4 +Name1 or (Name2 and Name3) or Name4 +Name1 or Name2 and Name3 or Name4 +v1 << 2 +1 >> v2 +1 % finished +1 + v2 - v3 * 4 ^ 5 ** v6 / 7 // 8 +((1 + v2) - (v3 * 4)) ^ (((5 ** v6) / 7) // 8) +not great +~great ++value +-1 +~int and not v1 ^ 123 + v2 | True +(~int) and (not ((v1 ^ (123 + v2)) | True)) ++(really ** -(confusing ** ~(operator ** -precedence))) +flags & ~ select.EPOLLIN and waiters.write_task is not None +lambda arg: None +lambda arg : None +lambda a=True: a +lambda a=True : a +lambda a, b, c=True: a +lambda a, b, c=True, *, d=(1 << v2), e='str': a +lambda a, b, c=True, *vararg, d=(v1 << 2), e='str', **kwargs: a + b +lambda a, b, c=True, *vararg, d=(v1 << 2), e='str', **kwargs : a + b +manylambdas = lambda x=lambda y=lambda z=1: z: y(): x() +foo = (lambda port_id, ignore_missing: {"port1": port1_resource, "port2": port2_resource}[port_id]) +1 if True else 2 +str or None if True else str or bytes or None +(str or None) if True else (str or bytes or None) +str or None if (1 if True else 2) else str or bytes or None +(str or None) if (1 if True else 2) else (str or bytes or None) +((super_long_variable_name or None) if (1 if super_long_test_name else 2) else (str or bytes or None)) +{'2.7': dead, '3.7': (long_live or die_hard)} +{'2.7': dead, '3.7': (long_live or die_hard), **{'3.6': verygood}} +{**a, **b, **c} +{"2.7", "3.6", "3.7", "3.8", "3.9"} +{"2.7", "3.6", "3.7", "3.8", "3.9",} +{"2.7", "3.6", "3.7", "3.8", "3.9", ("4.0" if gilectomy else "3.10")} +({"a": "b"}, (True or False), (+value), "string", b"bytes") or None +() +(1,) +(1, 2) +(1, 2, 3) +[] +[ ] +[ 1 , ] +[1, 2, 3, 4, 5, 6, 7, 8, 9, (10 or A), (11 or B), (12 or C)] +[ + 1, + 2, + 3, +] +[*a] +[*range(10)] +[ + *a, + 4, + 5, +] +[ + 4, + *a, + 5, +] +[ + this_is_a_very_long_variable_which_will_force_a_delimiter_split, + element, + another, + *more, +] +{ } +{ 1 , } +{ 1 : 2 , } +{i for i in (1, 2, 3)} +{(i ** 2) for i in (1, 2, 3)} +{(i ** 2) for i, _ in ((1, "a"), (2, "b"), (3, "c"))} +{((i ** 2) + j) for i in (1, 2, 3) for j in (1, 2, 3)} +[i for i in (1, 2, 3)] +[(i ** 2) for i in (1, 2, 3)] +[(i ** 2) for i, _ in ((1, "a"), (2, "b"), (3, "c"))] +[((i ** 2) + j) for i in (1, 2, 3) for j in (1, 2, 3)] +{i: 0 for i in (1, 2, 3)} +{i: j for i, j in ((1, "a"), (2, "b"), (3, "c"))} +{a: b * 2 for a, b in dictionary.items()} +{a: b * -2 for a, b in dictionary.items()} +{ + k: v + for k, v in this_is_a_very_long_variable_which_will_cause_a_trailing_comma_which_breaks_the_comprehension +} +Python3 > Python2 > COBOL +Life is Life +call() +call(arg) +call(kwarg="hey") +call(arg, kwarg="hey") +call(arg, another, kwarg="hey", **kwargs) +call( + this_is_a_very_long_variable_which_will_force_a_delimiter_split, + arg, + another, + kwarg="hey", + **kwargs, +) # note: no trailing comma pre-3.6 +call(*gidgets[:2]) +call(a, *gidgets[:2]) +call(**screen_kwargs) +call(b, **screen_kwargs) +call()()()()()() +call(**self.screen_kwargs) +call(b, **self.screen_kwargs) +call(a=a, *args) +call(a=a, *args,) +call(a=a, **kwargs) +call(a=a, **kwargs,) +lukasz.langa.pl +call.me(maybe) +1 .real +1.0 .real +....__class__ +list[str] +dict[str, int] +tuple[str, ...] 
+tuple[str, int, float, dict[str, int]] +tuple[ + str, + int, + float, + dict[str, int], +] +very_long_variable_name_filters: t.List[ + t.Tuple[str, t.Union[str, t.List[t.Optional[str]]]], +] +xxxx_xxxxx_xxxx_xxx: Callable[..., List[SomeClass]] = classmethod( # type: ignore + sync(async_xxxx_xxx_xxxx_xxxxx_xxxx_xxx.__func__) +) +xxxx_xxx_xxxx_xxxxx_xxxx_xxx: Callable[..., List[SomeClass]] = classmethod( # type: ignore + sync(async_xxxx_xxx_xxxx_xxxxx_xxxx_xxx.__func__) +) +xxxx_xxx_xxxx_xxxxx_xxxx_xxx: Callable[..., List[SomeClass]] = classmethod( + sync(async_xxxx_xxx_xxxx_xxxxx_xxxx_xxx.__func__) +) # type: ignore +(str or None) if (sys.version_info[0] > (3,)) else (str or bytes or None) +{"2.7": dead, "3.7": long_live or die_hard} +{"2.7", "3.6", "3.7", "3.8", "3.9", "4.0" if gilectomy else "3.10"} +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10 or A, 11 or B, 12 or C] +(SomeName) +SomeName +(Good, Bad, Ugly) +(i for i in (1, 2, 3)) +((i ** 2) for i in (1, 2, 3)) +((i ** 2) for i, _ in ((1, "a"), (2, "b"), (3, "c"))) +(((i ** 2) + j) for i in (1, 2, 3) for j in (1, 2, 3)) +(*starred,) +{ + "id": "1", + "type": "type", + "started_at": now(), + "ended_at": now() + timedelta(days=10), + "priority": 1, + "import_session_id": 1, + **kwargs, +} +a = (1,) +b = (1,) +c = 1 +d = (1,) + a + (2,) +e = (1,).count(1) +f = 1, *range(10) +g = 1, *"ten" +what_is_up_with_those_new_coord_names = (coord_names + set(vars_to_create)) + set( + vars_to_remove +) +what_is_up_with_those_new_coord_names = (coord_names | set(vars_to_create)) - set( + vars_to_remove +) +result = ( + session.query(models.Customer.id) + .filter( + models.Customer.account_id == account_id, models.Customer.email == email_address + ) + .order_by(models.Customer.id.asc()) + .all() +) +result = ( + session.query(models.Customer.id) + .filter( + models.Customer.account_id == account_id, models.Customer.email == email_address + ) + .order_by( + models.Customer.id.asc(), + ) + .all() +) +Ø = set() +authors.łukasz.say_thanks() +authors.lukasz.say_thanks() +mapping = { + A: 0.25 * (10.0 / 12), + B: 0.1 * (10.0 / 12), + C: 0.1 * (10.0 / 12), + D: 0.1 * (10.0 / 12), +} +[ + a + for + [ + a , + ] + in + [ + [ 1 ] + ] +] + +def gen(): + if 1: + if 2: + if 3: + if not is_value_of_type( + subkey, + type_args[0], + # key type is always invariant + invariant_check=True, + ): + return False + yield from outside_of_generator + a = yield + b = yield + c = yield + + +async def f(): + await some.complicated[0].call(with_args=(True or (1 is not 1))) + +lambda : None + +print(*[] or [1]) +print(**{1: 3} if False else {x: x for x in range(3)}) +print(*lambda x: x) +assert not Test, "Short message" +assert this is ComplexTest and not requirements.fit_in_a_single_line( + force=False +), "Short message" +assert parens is TooMany +for (x,) in (1,), (2,), (3,): + ... +for y in (): + ... +for z in (i for i in (1, 2, 3)): + ... +for i in call(): + ... +for j in 1 + (2 + 3): + ... +else: + ... +while this and that: + ... +while this and that: + ... +else: + ... 
+for ( + addr_family, + addr_type, + addr_proto, + addr_canonname, + addr_sockaddr, +) in socket.getaddrinfo("google.com", "http"): + pass +a = ( + aaaa.bbbb.cccc.dddd.eeee.ffff.gggg.hhhh.iiii.jjjj.kkkk.llll.mmmm.nnnn.oooo.pppp + in qqqq.rrrr.ssss.tttt.uuuu.vvvv.xxxx.yyyy.zzzz +) +a = ( + aaaa.bbbb.cccc.dddd.eeee.ffff.gggg.hhhh.iiii.jjjj.kkkk.llll.mmmm.nnnn.oooo.pppp + not in qqqq.rrrr.ssss.tttt.uuuu.vvvv.xxxx.yyyy.zzzz +) +a = ( + aaaa.bbbb.cccc.dddd.eeee.ffff.gggg.hhhh.iiii.jjjj.kkkk.llll.mmmm.nnnn.oooo.pppp + is qqqq.rrrr.ssss.tttt.uuuu.vvvv.xxxx.yyyy.zzzz +) +a = ( + aaaa.bbbb.cccc.dddd.eeee.ffff.gggg.hhhh.iiii.jjjj.kkkk.llll.mmmm.nnnn.oooo.pppp + is not qqqq.rrrr.ssss.tttt.uuuu.vvvv.xxxx.yyyy.zzzz +) +if ( + threading.current_thread() != threading.main_thread() + and threading.current_thread() != threading.main_thread() + or signal.getsignal(signal.SIGINT) != signal.default_int_handler +): + return True +if ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +): + return True +if ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + & aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +): + return True +if ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +): + return True +if ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +): + return True +if ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + * aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +): + return True +if ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + / aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +): + return True +if ( + ~aaaa.a + aaaa.b - aaaa.c * aaaa.d / aaaa.e + | aaaa.f & aaaa.g % aaaa.h ^ aaaa.i << aaaa.k >> aaaa.l ** aaaa.m // aaaa.n +): + return True +if ( + ~aaaaaaaa.a + aaaaaaaa.b - aaaaaaaa.c @ aaaaaaaa.d / aaaaaaaa.e + | aaaaaaaa.f & aaaaaaaa.g % aaaaaaaa.h + ^ aaaaaaaa.i << aaaaaaaa.k >> aaaaaaaa.l ** aaaaaaaa.m // aaaaaaaa.n +): + return True +if ( + ~aaaaaaaaaaaaaaaa.a + + aaaaaaaaaaaaaaaa.b + - aaaaaaaaaaaaaaaa.c * aaaaaaaaaaaaaaaa.d @ aaaaaaaaaaaaaaaa.e + | aaaaaaaaaaaaaaaa.f & aaaaaaaaaaaaaaaa.g % aaaaaaaaaaaaaaaa.h + ^ aaaaaaaaaaaaaaaa.i + << aaaaaaaaaaaaaaaa.k + >> aaaaaaaaaaaaaaaa.l ** aaaaaaaaaaaaaaaa.m // aaaaaaaaaaaaaaaa.n +): + return True +aaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaa - aaaaaaaaaaaaaaaa * ( + aaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaa +) / (aaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaa) +aaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa >> aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa << aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +bbbb >> bbbb * bbbb +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa ^ bbbb.a & aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa ^ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + +a += B +a[x] @= foo().bar +this.is_not >>= a.monad +last_call() +# standalone comment at ENDMARKER diff --git a/native/libcst/tests/fixtures/expr_statement.py b/native/libcst/tests/fixtures/expr_statement.py new file mode 100644 index 00000000..4ef73f08 --- /dev/null +++ b/native/libcst/tests/fixtures/expr_statement.py @@ -0,0 +1,11 @@ +1 +1, 2, 3 +x = 1 +x = 1, 2, 3 +x = y = z = 1, 2, 3 +x, 
y, z = 1, 2, 3 +abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) + +( ( ( ... ) ) ) + +a , = b \ No newline at end of file diff --git a/native/libcst/tests/fixtures/fun_with_func_defs.py b/native/libcst/tests/fixtures/fun_with_func_defs.py new file mode 100644 index 00000000..1a78d687 --- /dev/null +++ b/native/libcst/tests/fixtures/fun_with_func_defs.py @@ -0,0 +1,146 @@ +def f(a, /,): pass +def f(a, /, c, d, e): pass +def f(a, /, c, *, d, e): pass +def f(a, /, c, *, d, e, **kwargs): pass +def f(a=1, /,): pass +def f(a=1, /, b=2, c=4): pass +def f(a=1, /, b=2, *, c=4): pass +def f(a=1, /, b=2, *, c): pass +def f(a=1, /, b=2, *, c=4, **kwargs): pass +def f(a=1, /, b=2, *, c, **kwargs,): pass + + +def g( + a, + /, +): + pass + + +def f(a, /, c, d, e): + pass + + +def f(a, /, c, *, d, e): + pass + + +def f( + a, + /, + c, + *, + d, + e, + **kwargs, +): + pass + + +def f( + a=1, + /, +): + pass + + +def f(a=1, /, b=2, c=4): + pass + + +def f(a=1, /, b=2, *, c=4): + pass + + +def f(a=1, /, b=2, *, c): + pass + + +def f( + a=1, + /, + b=2, + *, + c=4, + **kwargs, +): + pass + + +def f( + a=1, + /, + b=2, + *, + c, + **kwargs, +): + pass + + +async def foo ( + bar : Baz , +) -> zooooooooom : ... + + +async def foo(bar : Baz = 0 ) : ... + +async def foo() -> Bar: ... + +async def outer( + foo +) -> Bar : + def inner(lol: Lol) -> None: + async def core (): + await lol + def second(inner): + pass + +def stars ( + yes : bool = True , + / , + noes : List[bool] = [ * falses ], + * all : The[Rest], + but : Wait[Theres[More]] , + ** it : ends[now] , + +) -> ret: + pass + +def stars ( + yes : bool = True , + / , + noes : List[bool] = [ * falses ], + * all : The[Rest], + but : Wait[Theres[More]] , + ** it : ends[now[without_a_comma]] + +) -> ret : + pass + + +def foo(bar: (yield)) -> (yield): something: (yield another) + +def foo( bar: (yield)) -> (yield) : + something: (yield another) + return 3 # no + return # yes + + +def f(): + for (yield 1)[1] in [1]: + pass + + +@decorators +# foo +@woohoo +def f(): + pass + +@getattr(None, '', lambda a: lambda b: a(b+1)) +def f(): ... + + +@a(now_this = lol) +def f(): ... diff --git a/native/libcst/tests/fixtures/global_nonlocal.py b/native/libcst/tests/fixtures/global_nonlocal.py new file mode 100644 index 00000000..a9839aef --- /dev/null +++ b/native/libcst/tests/fixtures/global_nonlocal.py @@ -0,0 +1,4 @@ +global a +global b , c, d +nonlocal a +nonlocal a , b \ No newline at end of file diff --git a/native/libcst/tests/fixtures/import.py b/native/libcst/tests/fixtures/import.py new file mode 100644 index 00000000..571e3640 --- /dev/null +++ b/native/libcst/tests/fixtures/import.py @@ -0,0 +1,19 @@ +# 'import' dotted_as_names +import sys +import time, sys +# 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) +from time import time +from time import (time) +from sys import path, argv +from sys import (path, argv) +from sys import (path, argv,) +from sys import * + + +from a import (b, ) +from . import a +from .a import b +from ... import a +from ...a import b +from .... import a +from ...... 
import a \ No newline at end of file diff --git a/native/libcst/tests/fixtures/indents_but_no_eol_before_eof.py b/native/libcst/tests/fixtures/indents_but_no_eol_before_eof.py new file mode 100644 index 00000000..22fd1309 --- /dev/null +++ b/native/libcst/tests/fixtures/indents_but_no_eol_before_eof.py @@ -0,0 +1,4 @@ +if 1: + if 2: + if 3: + pass \ No newline at end of file diff --git a/native/libcst/tests/fixtures/just_a_comment_without_nl.py b/native/libcst/tests/fixtures/just_a_comment_without_nl.py new file mode 100644 index 00000000..56e1a2f8 --- /dev/null +++ b/native/libcst/tests/fixtures/just_a_comment_without_nl.py @@ -0,0 +1 @@ +# just a comment without a newline \ No newline at end of file diff --git a/native/libcst/tests/fixtures/raise.py b/native/libcst/tests/fixtures/raise.py new file mode 100644 index 00000000..28613862 --- /dev/null +++ b/native/libcst/tests/fixtures/raise.py @@ -0,0 +1,4 @@ +raise +raise foo +raise foo from bar +raise lol() from f() + 1 \ No newline at end of file diff --git a/native/libcst/tests/fixtures/smol_statements.py b/native/libcst/tests/fixtures/smol_statements.py new file mode 100644 index 00000000..93687bce --- /dev/null +++ b/native/libcst/tests/fixtures/smol_statements.py @@ -0,0 +1,4 @@ +def f(): + pass ; break ; continue ; return ; return foo + + assert foo , bar ; a += 2 \ No newline at end of file diff --git a/native/libcst/tests/fixtures/spacious_spaces.py b/native/libcst/tests/fixtures/spacious_spaces.py new file mode 100644 index 00000000..5c979eee --- /dev/null +++ b/native/libcst/tests/fixtures/spacious_spaces.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/native/libcst/tests/fixtures/suicidal_slices.py b/native/libcst/tests/fixtures/suicidal_slices.py new file mode 100644 index 00000000..8d9566e8 --- /dev/null +++ b/native/libcst/tests/fixtures/suicidal_slices.py @@ -0,0 +1,28 @@ +slice[0] +slice[0:1] +slice[0:1:2] +slice[:] +slice[:-1] +slice[1:] +slice[::-1] +slice[d :: d + 1] +slice[:c, c - 1] +numpy[:, 0:1] +numpy[:, :-1] +numpy[0, :] +numpy[:, i] +numpy[0, :2] +numpy[:N, 0] +numpy[:2, :4] +numpy[2:4, 1:5] +numpy[4:, 2:] +numpy[:, (0, 1, 2, 5)] +numpy[0, [0]] +numpy[:, [i]] +numpy[1 : c + 1, c] +numpy[-(c + 1) :, d] +numpy[:, l[-2]] +numpy[:, ::-1] +numpy[np.newaxis, :] + +( spaces [:: , a : , a : a : a , ] ) \ No newline at end of file diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py new file mode 100644 index 00000000..76b1b2dd --- /dev/null +++ b/native/libcst/tests/fixtures/super_strings.py @@ -0,0 +1,28 @@ +_ = "" +_ = '' +_ = """""" +_ = '''''' + +_ = 'a' "string" 'that' r"is" 'concatenated ' + +b"string " +b"and non f" rb'string' + +( + "parenthesized" + "concatenated" + """triple + quoted + """ + +) + +_ = f"string" + +f"string" "bonanza" f'starts' r"""here""" + +_ = f"something {{**not** an expression}} {but(this._is)} {{and this isn't.}} end" + +_(f"ok { expr = !r: aosidjhoi } end") + +print(f"{self.ERASE_CURRENT_LINE}{self._human_seconds(elapsed_time)} {percent:.{self.pretty_precision}f}% complete, {self.estimate_completion(elapsed_time, finished, left)} estimated for {left} files to go...") diff --git a/native/libcst/tests/fixtures/terrible_tries.py b/native/libcst/tests/fixtures/terrible_tries.py new file mode 100644 index 00000000..91d6831e --- /dev/null +++ b/native/libcst/tests/fixtures/terrible_tries.py @@ -0,0 +1,71 @@ +#foo. 
+ +try : + bar() + +finally : + pass + + +try : + pass + + # foo + +except lol as LOL : + + pass + +except : + + # foo + + pass + +else : + + pass + +finally : + + foo + +try: + pass +except: + pass +finally: + pass + + +try: + + # 1 + + try: + + # 2 + + pass + + # 3 + + # 4 + + finally: + + # 5 + + pass + + # 6 + + # 7 + +except foo: + + #8 + + pass + + #9 diff --git a/native/libcst/tests/fixtures/trailing_comment_without_nl.py b/native/libcst/tests/fixtures/trailing_comment_without_nl.py new file mode 100644 index 00000000..0c58f18d --- /dev/null +++ b/native/libcst/tests/fixtures/trailing_comment_without_nl.py @@ -0,0 +1,4 @@ + + + +# hehehe >:) \ No newline at end of file diff --git a/native/libcst/tests/fixtures/tuple_shenanigans.py b/native/libcst/tests/fixtures/tuple_shenanigans.py new file mode 100644 index 00000000..f31c6452 --- /dev/null +++ b/native/libcst/tests/fixtures/tuple_shenanigans.py @@ -0,0 +1,28 @@ +(1, 2) +(1, 2, 3) + +# alright here we go. + +() +( # evil >:) + # evil >:( +) # ... +(1,) +( * 1 , * 2 ,) +*_ = (l,) +() = x +( ) = ( x, ) +(x) = (x) +( x , ) = x +( x , *y , * z , ) = l +( x , *y , * z , ) = ( x , *y , * z , ) = ( x , *y , * z , x ) +( + x , # :) + bar, * + baz + , +) =\ +( + (let, *s, ( ) ) , + nest , them , ( * t , * u , * p , l , * e , s , ) +) \ No newline at end of file diff --git a/native/libcst/tests/fixtures/vast_emptiness.py b/native/libcst/tests/fixtures/vast_emptiness.py new file mode 100644 index 00000000..e69de29b diff --git a/native/libcst/tests/fixtures/with_wickedness.py b/native/libcst/tests/fixtures/with_wickedness.py new file mode 100644 index 00000000..7cb5c67d --- /dev/null +++ b/native/libcst/tests/fixtures/with_wickedness.py @@ -0,0 +1,13 @@ +# with_wickedness + +with foo : ... + +async def f(): + async with foo as bar: + + with bar: + pass + + async with foo(1+1) as bar , 1 as (a, b, ) , 2 as [a, b] , 3 as a[b] : + pass + diff --git a/native/libcst/tests/fixtures/wonky_walrus.py b/native/libcst/tests/fixtures/wonky_walrus.py new file mode 100644 index 00000000..d0916ab8 --- /dev/null +++ b/native/libcst/tests/fixtures/wonky_walrus.py @@ -0,0 +1,13 @@ +( foo := 5 ) + +any((lastNum := num) == 1 for num in [1, 2, 3]) + +[(lastNum := num) == 1 for num in [1, 2, 3]] + +while f := x(): + pass + +if f := x(): pass + +f(y:=1) +f(x, y := 1 ) \ No newline at end of file diff --git a/native/libcst/tests/parser_roundtrip.rs b/native/libcst/tests/parser_roundtrip.rs new file mode 100644 index 00000000..2b8c79dd --- /dev/null +++ b/native/libcst/tests/parser_roundtrip.rs @@ -0,0 +1,50 @@ +use difference::assert_diff; +use itertools::Itertools; +use libcst_native::{parse_module, prettify_error, Codegen}; +use std::{ + iter::once, + path::{Component, PathBuf}, +}; + +fn all_fixtures() -> impl Iterator { + let mut path = PathBuf::from(file!()); + path.pop(); + path = path + .components() + .skip(1) + .chain(once(Component::Normal("fixtures".as_ref()))) + .collect(); + + path.read_dir().expect("read_dir").into_iter().map(|file| { + let path = file.unwrap().path(); + let contents = std::fs::read_to_string(&path).expect("reading file"); + (path, contents) + }) +} + +#[test] +fn roundtrip_fixtures() { + for (path, input) in all_fixtures() { + let input = if let Some(stripped) = input.strip_prefix('\u{feff}') { + stripped + } else { + &input + }; + let m = match parse_module(input, None) { + Ok(m) => m, + Err(e) => panic!("{}", prettify_error(e, format!("{:#?}", path).as_ref())), + }; + let mut state = Default::default(); + m.codegen(&mut state); + let 
generated = state.to_string(); + if generated != input { + let got = visualize(&generated); + let expected = visualize(input); + assert_diff!(expected.as_ref(), got.as_ref(), "", 0); + } + } +} + +fn visualize(s: &str) -> String { + s.replace(' ', "▩").lines().join("↩\n") +} diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml new file mode 100644 index 00000000..95bf4d2d --- /dev/null +++ b/native/libcst_derive/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "libcst_derive" +version = "0.1.0" +edition = "2018" + +[lib] +proc-macro = true + +[dependencies] +syn = "1.0" +quote = "1.0" diff --git a/native/libcst_derive/src/codegen.rs b/native/libcst_derive/src/codegen.rs new file mode 100644 index 00000000..208d5104 --- /dev/null +++ b/native/libcst_derive/src/codegen.rs @@ -0,0 +1,63 @@ +use proc_macro::TokenStream; +use quote::{quote, quote_spanned}; +use syn::{self, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; + +pub(crate) fn impl_codegen(ast: &DeriveInput) -> TokenStream { + match &ast.data { + Data::Enum(e) => impl_enum(ast, e), + Data::Struct(s) => quote_spanned! { + s.struct_token.span() => + compile_error!("Struct type is not supported") + } + .into(), + Data::Union(u) => quote_spanned! { + u.union_token.span() => + compile_error!("Union type is not supported") + } + .into(), + } +} + +fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { + let mut varnames = vec![]; + for var in e.variants.iter() { + match &var.fields { + Fields::Named(n) => { + return quote_spanned! { + n.span() => + compile_error!("Named enum fields not supported") + } + .into() + } + f @ Fields::Unit => { + return quote_spanned! { + f.span() => + compile_error!("Empty enum variants not supported") + } + .into() + } + Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => { + if unnamed.len() > 1 { + return quote_spanned! { + unnamed.span() => + compile_error!("Multiple unnamed fields not supported") + } + .into(); + } + varnames.push(&var.ident); + } + } + } + let ident = &ast.ident; + let generics = &ast.generics; + let gen = quote! { + impl<'a> Codegen<'a> for #ident #generics { + fn codegen(&self, state: &mut CodegenState<'a>) { + match self { + #(Self::#varnames(x) => x.codegen(state),)* + } + } + } + }; + gen.into() +} diff --git a/native/libcst_derive/src/inflate.rs b/native/libcst_derive/src/inflate.rs new file mode 100644 index 00000000..6223a256 --- /dev/null +++ b/native/libcst_derive/src/inflate.rs @@ -0,0 +1,63 @@ +use proc_macro::TokenStream; +use quote::{quote, quote_spanned}; +use syn::{self, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; + +pub(crate) fn impl_inflate(ast: &DeriveInput) -> TokenStream { + match &ast.data { + Data::Enum(e) => impl_inflate_enum(ast, e), + Data::Struct(s) => quote_spanned! { + s.struct_token.span() => + compile_error!("Struct type is not supported") + } + .into(), + Data::Union(u) => quote_spanned! { + u.union_token.span() => + compile_error!("Union type is not supported") + } + .into(), + } +} + +fn impl_inflate_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { + let mut varnames = vec![]; + for var in e.variants.iter() { + match &var.fields { + Fields::Named(n) => { + return quote_spanned! { + n.span() => + compile_error!("Named enum fields not supported") + } + .into() + } + f @ Fields::Unit => { + return quote_spanned! { + f.span() => + compile_error!("Empty enum variants not supported") + } + .into() + } + Fields::Unnamed(FieldsUnnamed { unnamed, .. 
}) => { + if unnamed.len() > 1 { + return quote_spanned! { + unnamed.span() => + compile_error!("Multiple unnamed fields not supported") + } + .into(); + } + varnames.push(&var.ident); + } + } + } + let ident = &ast.ident; + let generics = &ast.generics; + let gen = quote! { + impl<'a> Inflate<'a> for #ident #generics { + fn inflate(mut self, config: & crate::tokenizer::whitespace_parser::Config<'a>) -> std::result::Result { + match self { + #(Self::#varnames(x) => Ok(Self::#varnames(x.inflate(config)?)),)* + } + } + } + }; + gen.into() +} diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs new file mode 100644 index 00000000..26da5fca --- /dev/null +++ b/native/libcst_derive/src/into_py.rs @@ -0,0 +1,177 @@ +use proc_macro::TokenStream; +use quote::{format_ident, quote, quote_spanned, ToTokens}; +use syn::{ + spanned::Spanned, Attribute, Data, DataEnum, DataStruct, DeriveInput, Fields, FieldsNamed, + FieldsUnnamed, Type, TypePath, Visibility, +}; + +pub(crate) fn impl_into_py(ast: &DeriveInput) -> TokenStream { + match &ast.data { + Data::Enum(e) => impl_into_py_enum(ast, e), + Data::Struct(s) => impl_into_py_struct(ast, s), + Data::Union(u) => quote_spanned! { + u.union_token.span() => + compile_error!("Union type is not supported") + } + .into(), + } +} + +fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { + let mut toks = vec![]; + for var in e.variants.iter() { + let varname = &var.ident; + match &var.fields { + Fields::Named(n) => { + let mut fieldnames = vec![]; + for field in n.named.iter() { + if has_attr(&field.attrs, "skip_py") { + continue; + } + fieldnames.push(field.ident.as_ref().unwrap()); + } + let kwargs_toks = fields_to_kwargs(&var.fields, true); + toks.push(quote! { + Self::#varname { #(#fieldnames,)* .. } => { + let libcst = pyo3::types::PyModule::import(py, "libcst").expect("libcst couldn't be imported"); + let kwargs = #kwargs_toks ; + libcst + .getattr(stringify!(#varname)) + .expect(stringify!(no #varname found in libcst)) + .call((), Some(kwargs)) + .expect(stringify!(conversion failed for #varname)) + .into() + } + }) + } + f @ Fields::Unit => { + return quote_spanned! { + f.span() => + compile_error!("Empty enum variants not supported") + } + .into() + } + Fields::Unnamed(_) => { + toks.push(quote! { + Self::#varname(x, ..) => x.into_py(py), + }); + } + } + } + let ident = &ast.ident; + let generics = &ast.generics; + let gen = quote! { + use pyo3::types::IntoPyDict as _; + #[automatically_derived] + impl#generics pyo3::conversion::IntoPy for #ident #generics { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + #(#toks)* + } + } + } + }; + gen.into() +} + +fn impl_into_py_struct(ast: &DeriveInput, e: &DataStruct) -> TokenStream { + let kwargs_toks = fields_to_kwargs(&e.fields, false); + let ident = &ast.ident; + let generics = &ast.generics; + let gen = quote! 
{ + use pyo3::types::IntoPyDict as _; + #[automatically_derived] + impl#generics pyo3::conversion::IntoPy for #ident #generics { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + let libcst = pyo3::types::PyModule::import(py, "libcst").expect("libcst couldn't be imported"); + let kwargs = #kwargs_toks ; + libcst + .getattr(stringify!(#ident)) + .expect(stringify!(no #ident found in libcst)) + .call((), Some(kwargs)) + .expect(stringify!(conversion failed for #ident)) + .into() + } + } + }; + gen.into() +} + +fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenStream { + let mut empty_kwargs = false; + let mut py_varnames = vec![]; + let mut rust_varnames = vec![]; + let mut optional_py_varnames = vec![]; + let mut optional_rust_varnames = vec![]; + match &fields { + Fields::Named(FieldsNamed { named, .. }) => { + for field in named.iter() { + if has_attr(&field.attrs, "skip_py") { + continue; + } + if let Some(ident) = field.ident.as_ref() { + let include = if let Visibility::Public(_) = field.vis { + true + } else { + is_enum + }; + if include { + let pyname = format_ident!("{}", ident); + let rustname = if is_enum { + ident.to_token_stream() + } else { + quote! { self.#ident } + }; + if !has_attr(&field.attrs, "no_py_default") { + if let Type::Path(TypePath { path, .. }) = &field.ty { + if let Some(first) = path.segments.first() { + if first.ident == "Option" { + optional_py_varnames.push(pyname); + optional_rust_varnames.push(rustname); + continue; + } + } + } + } + py_varnames.push(pyname); + rust_varnames.push(rustname); + } + } + } + empty_kwargs = py_varnames.is_empty() && optional_py_varnames.is_empty(); + } + Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => { + if unnamed.first().is_some() { + py_varnames.push(format_ident!("value")); + rust_varnames.push(quote! { self.0 }); + } else { + empty_kwargs = true; + } + } + Fields::Unit => { + empty_kwargs = true; + } + }; + let kwargs_pairs = quote! { + #(Some((stringify!(#py_varnames), #rust_varnames.into_py(py))),)* + }; + let optional_pairs = quote! { + #(#optional_rust_varnames.map(|x| (stringify!(#optional_py_varnames), x.into_py(py))),)* + }; + if empty_kwargs { + quote! { pyo3::types::PyDict::new(py) } + } else { + quote! 
{ + [ #kwargs_pairs #optional_pairs ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py) + } + } +} + +fn has_attr(attrs: &[Attribute], name: &'static str) -> bool { + attrs.iter().any(|attr| attr.path.is_ident(name)) +} diff --git a/native/libcst_derive/src/lib.rs b/native/libcst_derive/src/lib.rs new file mode 100644 index 00000000..4a687bec --- /dev/null +++ b/native/libcst_derive/src/lib.rs @@ -0,0 +1,31 @@ +mod inflate; +use inflate::impl_inflate; +mod parenthesized_node; +use parenthesized_node::impl_parenthesized_node; +mod codegen; +use codegen::impl_codegen; +mod into_py; +use into_py::impl_into_py; + +use proc_macro::TokenStream; + +#[proc_macro_derive(Inflate)] +pub fn inflate_derive(input: TokenStream) -> TokenStream { + let ast = syn::parse(input).unwrap(); + impl_inflate(&ast) +} + +#[proc_macro_derive(ParenthesizedNode)] +pub fn parenthesized_node_derive(input: TokenStream) -> TokenStream { + impl_parenthesized_node(&syn::parse(input).unwrap()) +} + +#[proc_macro_derive(Codegen)] +pub fn parenthesized_node_codegen(input: TokenStream) -> TokenStream { + impl_codegen(&syn::parse(input).unwrap()) +} + +#[proc_macro_derive(IntoPy, attributes(skip_py, no_py_default))] +pub fn into_py(input: TokenStream) -> TokenStream { + impl_into_py(&syn::parse(input).unwrap()) +} diff --git a/native/libcst_derive/src/parenthesized_node.rs b/native/libcst_derive/src/parenthesized_node.rs new file mode 100644 index 00000000..52d2aab4 --- /dev/null +++ b/native/libcst_derive/src/parenthesized_node.rs @@ -0,0 +1,93 @@ +use proc_macro::TokenStream; +use quote::{quote, quote_spanned}; +use syn::{spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; + +pub(crate) fn impl_parenthesized_node(ast: &DeriveInput) -> TokenStream { + match &ast.data { + Data::Enum(e) => impl_enum(ast, e), + Data::Struct(_) => impl_struct(ast), + Data::Union(u) => quote_spanned! { + u.union_token.span() => + compile_error!("Union type is not supported") + } + .into(), + } +} + +fn impl_struct(ast: &DeriveInput) -> TokenStream { + let ident = &ast.ident; + let generics = &ast.generics; + let gen = quote! { + impl<'a> ParenthesizedNode<'a> for #ident #generics { + fn lpar(&self) -> &Vec> { + &self.lpar + } + fn rpar(&self) -> &Vec> { + &self.rpar + } + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + let mut lpar = self.lpar; + let mut rpar = self.rpar; + lpar.insert(0, left); + rpar.push(right); + #[allow(clippy::needless_update)] + Self { lpar, rpar, ..self } + } + } + }; + gen.into() +} + +fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { + let mut varnames = vec![]; + for var in e.variants.iter() { + match &var.fields { + Fields::Named(n) => { + return quote_spanned! { + n.span() => + compile_error!("Named enum fields not supported") + } + .into() + } + f @ Fields::Unit => { + return quote_spanned! { + f.span() => + compile_error!("Empty enum variants not supported") + } + .into() + } + Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => { + if unnamed.len() > 1 { + return quote_spanned! { + unnamed.span() => + compile_error!("Multiple unnamed fields not supported") + } + .into(); + } + varnames.push(&var.ident); + } + } + } + let ident = &ast.ident; + let generics = &ast.generics; + let gen = quote! 
{ + impl<'a> ParenthesizedNode<'a> for #ident #generics { + fn lpar(&self) -> &Vec> { + match self { + #(Self::#varnames(x) => x.lpar(),)* + } + } + fn rpar(&self) -> &Vec> { + match self { + #(Self::#varnames(x) => x.rpar(),)* + } + } + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + match self { + #(Self::#varnames(x) => Self::#varnames(x.with_parens(left, right)),)* + } + } + } + }; + gen.into() +} diff --git a/native/roundtrip.sh b/native/roundtrip.sh new file mode 100755 index 00000000..3f732143 --- /dev/null +++ b/native/roundtrip.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +PARSE=$(dirname $0)/target/release/parse + +exec diff -u "$1" <($PARSE < "$1") diff --git a/pyproject.toml b/pyproject.toml index c9a93f4d..3f372901 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,9 @@ [tool.black] target-version = ["py36"] +exclude = "native/.*" + +[tool.ufmt] +excludes = ["native/", "stubs/"] + +[build-system] +requires = ["setuptools", "wheel", "setuptools-rust"] diff --git a/requirements-dev.txt b/requirements-dev.txt index 5bf3217b..9223d3a3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,11 +6,13 @@ git+https://github.com/jimmylai/sphinx.git@slots_type_annotation hypothesis>=4.36.0 hypothesmith>=0.0.4 jupyter>=1.0.0 +maturin>=0.8.3,<0.9 nbsphinx>=0.4.2 prompt-toolkit>=2.0.9 pyre-check==0.9.3 setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 tox>=3.18.1 -ufmt==1.2 +ufmt==1.3 usort==0.6.3 +setuptools-rust>=0.12.1 diff --git a/setup.py b/setup.py index 593acd7b..4e35ca68 100644 --- a/setup.py +++ b/setup.py @@ -7,6 +7,8 @@ from os import path, environ import setuptools +from setuptools_rust import Binding, RustExtension + # Grab the readme so that our package stays in sync with github. this_directory: str = path.abspath(path.dirname(__file__)) @@ -50,6 +52,13 @@ setuptools.setup( if "=" in dep ], }, + rust_extensions=[ + RustExtension( + "libcst.native", + path="native/libcst/Cargo.toml", + binding=Binding.PyO3, + ) + ], classifiers=[ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", diff --git a/stubs/libcst_native/parser_config.pyi b/stubs/libcst_native/parser_config.pyi new file mode 100644 index 00000000..0165df79 --- /dev/null +++ b/stubs/libcst_native/parser_config.pyi @@ -0,0 +1,44 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Any, FrozenSet, Mapping, Sequence + +from libcst._parser.parso.utils import PythonVersionInfo + +class BaseWhitespaceParserConfig: + def __new__( + cls, + *, + lines: Sequence[str], + default_newline: str, + ) -> BaseWhitespaceParserConfig: ... + + lines: Sequence[str] + default_newline: str + + +class ParserConfig(BaseWhitespaceParserConfig): + def __new__( + cls, + *, + lines: Sequence[str], + encoding: str, + default_indent: str, + default_newline: str, + has_trailing_newline: bool, + version: PythonVersionInfo, + future_imports: FrozenSet[str], + ) -> BaseWhitespaceParserConfig: ... + + # lines is inherited + encoding: str + default_indent: str + # default_newline is inherited + has_trailing_newline: bool + version: PythonVersionInfo + future_imports: FrozenSet[str] + + +def parser_config_asdict(config: ParserConfig) -> Mapping[str, Any]: ... 
diff --git a/stubs/libcst_native/token_type.pyi b/stubs/libcst_native/token_type.pyi new file mode 100644 index 00000000..a0dd8179 --- /dev/null +++ b/stubs/libcst_native/token_type.pyi @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +class TokenType: + name: str + contains_syntax: bool + +STRING: TokenType = ... +NAME: TokenType = ... +NUMBER: TokenType = ... +OP: TokenType = ... +NEWLINE: TokenType = ... +INDENT: TokenType = ... +DEDENT: TokenType = ... +ASYNC: TokenType = ... +AWAIT: TokenType = ... +FSTRING_START: TokenType = ... +FSTRING_STRING: TokenType = ... +FSTRING_END: TokenType = ... +ENDMARKER: TokenType = ... +# unused dummy tokens for backwards compat with the parso tokenizer +ERRORTOKEN: TokenType = ... +ERROR_DEDENT: TokenType = ... diff --git a/stubs/libcst_native/tokenize.pyi b/stubs/libcst_native/tokenize.pyi new file mode 100644 index 00000000..bbcbeab0 --- /dev/null +++ b/stubs/libcst_native/tokenize.pyi @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Iterator, Optional, Tuple + +from libcst_native import token_type, whitespace_state + +class Token: + def __new__( + cls, + type: token_type.TokenType, + string: str, + start_pos: Tuple[int, int], + end_pos: Tuple[int, int], + whitespace_before: whitespace_state.WhitespaceState, + whitespace_after: whitespace_state.WhitespaceState, + relative_indent: Optional[str], + ) -> Token: ... + type: token_type.TokenType + string: str + start_pos: Tuple[int, int] + end_pos: Tuple[int, int] + whitespace_before: whitespace_state.WhitespaceState + whitespace_after: whitespace_state.WhitespaceState + relative_indent: Optional[str] + +def tokenize(text: str) -> Iterator[Token]: ... diff --git a/stubs/libcst_native/whitespace_parser.pyi b/stubs/libcst_native/whitespace_parser.pyi new file mode 100644 index 00000000..291f6dc6 --- /dev/null +++ b/stubs/libcst_native/whitespace_parser.pyi @@ -0,0 +1,28 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Optional, Sequence, Union + +from libcst._nodes.whitespace import ( + EmptyLine, + Newline, + ParenthesizedWhitespace, + SimpleWhitespace, + TrailingWhitespace, +) +from libcst._parser.types.config import BaseWhitespaceParserConfig as Config +from libcst._parser.types.whitespace_state import WhitespaceState as State + +def parse_simple_whitespace(config: Config, state: State) -> SimpleWhitespace: ... +def parse_empty_lines( + config: Config, + state: State, + *, + override_absolute_indent: Optional[str] = None, +) -> Sequence[EmptyLine]: ... +def parse_trailing_whitespace(config: Config, state: State) -> TrailingWhitespace: ... +def parse_parenthesizable_whitespace( + config: Config, state: State +) -> Union[SimpleWhitespace, ParenthesizedWhitespace]: ... diff --git a/stubs/libcst_native/whitespace_state.pyi b/stubs/libcst_native/whitespace_state.pyi new file mode 100644 index 00000000..da43bd54 --- /dev/null +++ b/stubs/libcst_native/whitespace_state.pyi @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +class WhitespaceState: + def __new__( + cls, line: int, column: int, absolute_indent: str, is_parenthesized: bool + ) -> WhitespaceState: ... + + line: int # one-indexed (to match parso's behavior) + column: int # zero-indexed (to match parso's behavior) + # What to look for when executing `_parse_indent`. + absolute_indent: str + is_parenthesized: bool From 10c3aa09a7b1426fe0f1c896a1c7ec25eb7de740 Mon Sep 17 00:00:00 2001 From: John Reese Date: Tue, 21 Dec 2021 14:55:04 -0800 Subject: [PATCH 163/632] =?UTF-8?q?Upgrade=20to=20=C2=B5sort=201.0.0rc1,?= =?UTF-8?q?=20and=20apply=20formatting=20changes=20(#565)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Upgrade to usort==1.0.0rc1 * Apply sorting changes from usort 1.0.0rc1 * reapply codegen Co-authored-by: Zsolt Dollenstein --- libcst/_batched_visitor.py | 4 +-- libcst/_exceptions.py | 2 +- libcst/_maybe_sentinel.py | 2 +- libcst/_metadata_dependent.py | 4 +-- libcst/_nodes/base.py | 2 +- libcst/_nodes/expression.py | 2 +- libcst/_nodes/internal.py | 2 +- libcst/_nodes/module.py | 4 +-- libcst/_nodes/tests/test_await.py | 2 +- libcst/_nodes/tests/test_cst_node.py | 2 +- libcst/_nodes/tests/test_docstring.py | 2 +- libcst/_nodes/tests/test_flatten_behavior.py | 2 +- libcst/_nodes/tests/test_for.py | 2 +- libcst/_nodes/tests/test_module.py | 2 +- libcst/_nodes/tests/test_removal_behavior.py | 2 +- libcst/_nodes/tests/test_simple_comp.py | 2 +- libcst/_nodes/tests/test_with.py | 2 +- libcst/_parser/base_parser.py | 2 +- libcst/_parser/entrypoints.py | 2 +- libcst/_parser/grammar.py | 4 +-- libcst/_parser/parso/tests/test_fstring.py | 2 +- libcst/_parser/parso/tests/test_tokenize.py | 2 +- libcst/_parser/parso/tests/test_utils.py | 2 +- libcst/_parser/py_whitespace_parser.py | 6 ++-- libcst/_parser/tests/test_detect_config.py | 4 +-- libcst/_parser/tests/test_footer_behavior.py | 2 +- libcst/_parser/tests/test_node_identity.py | 2 +- libcst/_parser/tests/test_parse_errors.py | 2 +- libcst/_parser/tests/test_version_compare.py | 2 +- .../_parser/tests/test_whitespace_parser.py | 2 +- libcst/_parser/tests/test_wrapped_tokenize.py | 4 +-- libcst/_parser/types/config.py | 2 +- libcst/_parser/types/tests/test_config.py | 2 +- libcst/_position.py | 2 +- libcst/_removal_sentinel.py | 2 +- libcst/_typed_visitor.py | 8 ++--- libcst/_typed_visitor_base.py | 2 +- libcst/codemod/__init__.py | 4 +-- libcst/codemod/_cli.py | 8 ++--- libcst/codemod/_runner.py | 2 +- libcst/codemod/_testing.py | 2 +- libcst/codemod/tests/test_cli.py | 2 +- libcst/codemod/tests/test_runner.py | 2 +- .../visitors/_apply_type_annotations.py | 2 +- .../_gather_string_annotation_names.py | 2 +- .../tests/test_apply_type_annotations.py | 2 +- libcst/helpers/tests/test_expression.py | 2 +- libcst/helpers/tests/test_statement.py | 2 +- libcst/matchers/__init__.py | 29 ++++++++++--------- libcst/matchers/_matcher_base.py | 4 +-- libcst/matchers/_return_types.py | 7 +++-- libcst/matchers/_visitors.py | 12 ++++---- libcst/matchers/tests/test_decorators.py | 4 +-- libcst/matchers/tests/test_visitors.py | 2 +- libcst/metadata/base_provider.py | 6 ++-- .../metadata/expression_context_provider.py | 2 +- libcst/metadata/full_repo_manager.py | 2 +- libcst/metadata/scope_provider.py | 2 +- .../tests/test_expression_context_provider.py | 2 +- .../tests/test_parent_node_provider.py | 2 +- 
.../metadata/tests/test_reentrant_codegen.py | 2 +- libcst/metadata/tests/test_scope_provider.py | 8 ++--- libcst/metadata/tests/test_span_provider.py | 2 +- .../tests/test_type_inference_provider.py | 2 +- libcst/metadata/wrapper.py | 6 ++-- libcst/tests/test_deep_clone.py | 2 +- libcst/tests/test_e2e.py | 4 +-- libcst/tests/test_exceptions.py | 2 +- libcst/tests/test_pyre_integration.py | 4 +-- libcst/tests/test_tabs.py | 2 +- libcst/tests/test_type_enforce.py | 4 +-- libcst/tool.py | 4 +-- requirements-dev.txt | 2 +- setup.py | 2 +- 74 files changed, 124 insertions(+), 122 deletions(-) diff --git a/libcst/_batched_visitor.py b/libcst/_batched_visitor.py index 68340108..1f2c1a45 100644 --- a/libcst/_batched_visitor.py +++ b/libcst/_batched_visitor.py @@ -5,14 +5,14 @@ import inspect from typing import ( - TYPE_CHECKING, Callable, + cast, Iterable, List, Mapping, MutableMapping, Optional, - cast, + TYPE_CHECKING, ) from libcst._metadata_dependent import MetadataDependent diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index 1e84aab0..b549602f 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from enum import Enum, auto +from enum import auto, Enum from typing import Any, Callable, Iterable, Optional, Sequence, Tuple, Union from typing_extensions import final diff --git a/libcst/_maybe_sentinel.py b/libcst/_maybe_sentinel.py index dc968f95..f3cac711 100644 --- a/libcst/_maybe_sentinel.py +++ b/libcst/_maybe_sentinel.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from enum import Enum, auto +from enum import auto, Enum class MaybeSentinel(Enum): diff --git a/libcst/_metadata_dependent.py b/libcst/_metadata_dependent.py index 60423017..1a1ca61f 100644 --- a/libcst/_metadata_dependent.py +++ b/libcst/_metadata_dependent.py @@ -7,14 +7,14 @@ import inspect from abc import ABC from contextlib import contextmanager from typing import ( - TYPE_CHECKING, + cast, ClassVar, Collection, Iterator, Mapping, Type, + TYPE_CHECKING, TypeVar, - cast, ) if TYPE_CHECKING: diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index a1f659d9..21c6061c 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -6,7 +6,7 @@ from abc import ABC, abstractmethod from copy import deepcopy from dataclasses import dataclass, field, fields, replace -from typing import Any, Dict, List, Mapping, Sequence, TypeVar, Union, cast +from typing import Any, cast, Dict, List, Mapping, Sequence, TypeVar, Union from libcst._flatten_sentinel import FlattenSentinel from libcst._nodes.internal import CodegenState diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 25b372d1..cd177daa 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -9,7 +9,7 @@ from abc import ABC, abstractmethod from ast import literal_eval from contextlib import contextmanager from dataclasses import dataclass, field -from enum import Enum, auto +from enum import auto, Enum from tokenize import ( Floatnumber as FLOATNUMBER_RE, Imagnumber as IMAGNUMBER_RE, diff --git a/libcst/_nodes/internal.py b/libcst/_nodes/internal.py index b8294fb6..4b4dfcaa 100644 --- a/libcst/_nodes/internal.py +++ b/libcst/_nodes/internal.py @@ -6,7 +6,7 @@ from contextlib import contextmanager from dataclasses import dataclass, field -from typing import TYPE_CHECKING, 
Iterable, Iterator, List, Optional, Sequence, Union +from typing import Iterable, Iterator, List, Optional, Sequence, TYPE_CHECKING, Union from libcst._add_slots import add_slots from libcst._flatten_sentinel import FlattenSentinel diff --git a/libcst/_nodes/module.py b/libcst/_nodes/module.py index 2bd26016..51643ff5 100644 --- a/libcst/_nodes/module.py +++ b/libcst/_nodes/module.py @@ -4,15 +4,15 @@ # LICENSE file in the root directory of this source tree. from dataclasses import dataclass -from typing import TYPE_CHECKING, Optional, Sequence, TypeVar, Union, cast +from typing import cast, Optional, Sequence, TYPE_CHECKING, TypeVar, Union from libcst._add_slots import add_slots from libcst._nodes.base import CSTNode from libcst._nodes.internal import CodegenState, visit_body_sequence, visit_sequence from libcst._nodes.statement import ( BaseCompoundStatement, - SimpleStatementLine, get_docstring_impl, + SimpleStatementLine, ) from libcst._nodes.whitespace import EmptyLine from libcst._removal_sentinel import RemovalSentinel diff --git a/libcst/_nodes/tests/test_await.py b/libcst/_nodes/tests/test_await.py index a07f2c62..aa404b44 100644 --- a/libcst/_nodes/tests/test_await.py +++ b/libcst/_nodes/tests/test_await.py @@ -6,7 +6,7 @@ from typing import Any import libcst as cst -from libcst import PartialParserConfig, parse_expression, parse_statement +from libcst import parse_expression, parse_statement, PartialParserConfig from libcst._nodes.tests.base import CSTNodeTest from libcst.metadata import CodeRange from libcst.testing.utils import data_provider diff --git a/libcst/_nodes/tests/test_cst_node.py b/libcst/_nodes/tests/test_cst_node.py index 15f0f7ab..84611184 100644 --- a/libcst/_nodes/tests/test_cst_node.py +++ b/libcst/_nodes/tests/test_cst_node.py @@ -10,7 +10,7 @@ import libcst as cst from libcst._removal_sentinel import RemovalSentinel from libcst._types import CSTNodeT from libcst._visitors import CSTTransformer -from libcst.testing.utils import UnitTest, data_provider, none_throws +from libcst.testing.utils import data_provider, none_throws, UnitTest _EMPTY_SIMPLE_WHITESPACE = cst.SimpleWhitespace("") diff --git a/libcst/_nodes/tests/test_docstring.py b/libcst/_nodes/tests/test_docstring.py index 6e05d0fa..8bb8b555 100644 --- a/libcst/_nodes/tests/test_docstring.py +++ b/libcst/_nodes/tests/test_docstring.py @@ -9,7 +9,7 @@ from typing import Optional import libcst as cst from libcst.helpers import ensure_type -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class DocstringTest(UnitTest): diff --git a/libcst/_nodes/tests/test_flatten_behavior.py b/libcst/_nodes/tests/test_flatten_behavior.py index 5f37067c..d373b61d 100644 --- a/libcst/_nodes/tests/test_flatten_behavior.py +++ b/libcst/_nodes/tests/test_flatten_behavior.py @@ -6,7 +6,7 @@ from typing import Type, Union import libcst as cst -from libcst import FlattenSentinel, RemovalSentinel, parse_expression, parse_module +from libcst import FlattenSentinel, parse_expression, parse_module, RemovalSentinel from libcst._nodes.tests.base import CSTNodeTest from libcst._types import CSTNodeT from libcst._visitors import CSTTransformer diff --git a/libcst/_nodes/tests/test_for.py b/libcst/_nodes/tests/test_for.py index ffc91b48..9fbc3e49 100644 --- a/libcst/_nodes/tests/test_for.py +++ b/libcst/_nodes/tests/test_for.py @@ -6,7 +6,7 @@ from typing import Any import libcst as cst -from libcst import PartialParserConfig, parse_statement +from libcst import 
parse_statement, PartialParserConfig from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock from libcst.metadata import CodeRange from libcst.testing.utils import data_provider diff --git a/libcst/_nodes/tests/test_module.py b/libcst/_nodes/tests/test_module.py index 57a8fd43..3177c578 100644 --- a/libcst/_nodes/tests/test_module.py +++ b/libcst/_nodes/tests/test_module.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Tuple, cast +from typing import cast, Tuple import libcst as cst from libcst import parse_module, parse_statement diff --git a/libcst/_nodes/tests/test_removal_behavior.py b/libcst/_nodes/tests/test_removal_behavior.py index aeb9745f..e97d3681 100644 --- a/libcst/_nodes/tests/test_removal_behavior.py +++ b/libcst/_nodes/tests/test_removal_behavior.py @@ -6,7 +6,7 @@ from typing import Type, Union import libcst as cst -from libcst import RemovalSentinel, parse_module +from libcst import parse_module, RemovalSentinel from libcst._nodes.tests.base import CSTNodeTest from libcst._types import CSTNodeT from libcst._visitors import CSTTransformer diff --git a/libcst/_nodes/tests/test_simple_comp.py b/libcst/_nodes/tests/test_simple_comp.py index cf73176b..e7dfdc76 100644 --- a/libcst/_nodes/tests/test_simple_comp.py +++ b/libcst/_nodes/tests/test_simple_comp.py @@ -6,7 +6,7 @@ from typing import Any, Callable import libcst as cst -from libcst import PartialParserConfig, parse_expression, parse_statement +from libcst import parse_expression, parse_statement, PartialParserConfig from libcst._nodes.tests.base import CSTNodeTest from libcst.metadata import CodeRange from libcst.testing.utils import data_provider diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index 7a53c2be..4da84b35 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -6,7 +6,7 @@ from typing import Any import libcst as cst -from libcst import PartialParserConfig, parse_statement +from libcst import parse_statement, PartialParserConfig from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index 35dafba4..04b9bd11 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -28,9 +28,9 @@ from typing import Generic, Iterable, List, Sequence, TypeVar, Union from libcst._exceptions import ( EOFSentinel, + get_expected_str, ParserSyntaxError, PartialParserSyntaxError, - get_expected_str, ) from libcst._parser.parso.pgen2.generator import DFAState, Grammar, ReservedString from libcst._parser.parso.python.token import TokenType diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index 28966cd1..af58301a 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -39,7 +39,7 @@ def _parse( detect_default_newline: bool, ) -> CSTNode: if is_native(): - from libcst.native import parse_module, parse_expression, parse_statement + from libcst.native import parse_expression, parse_module, parse_statement encoding, source_str = convert_to_utf8(source, partial=config) diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index b86483c0..7e678acc 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -133,9 +133,9 @@ from 
libcst._parser.conversions.terminals import ( convert_OP, convert_STRING, ) -from libcst._parser.parso.pgen2.generator import Grammar, generate_grammar +from libcst._parser.parso.pgen2.generator import generate_grammar, Grammar from libcst._parser.parso.python.token import PythonTokenTypes, TokenType -from libcst._parser.parso.utils import PythonVersionInfo, parse_version_string +from libcst._parser.parso.utils import parse_version_string, PythonVersionInfo from libcst._parser.production_decorator import get_productions from libcst._parser.types.config import AutoConfig from libcst._parser.types.conversions import NonterminalConversion, TerminalConversion diff --git a/libcst/_parser/parso/tests/test_fstring.py b/libcst/_parser/parso/tests/test_fstring.py index 6851e8b1..255366bb 100644 --- a/libcst/_parser/parso/tests/test_fstring.py +++ b/libcst/_parser/parso/tests/test_fstring.py @@ -15,7 +15,7 @@ # pyre-unsafe from libcst._parser.parso.python.tokenize import tokenize from libcst._parser.parso.utils import parse_version_string -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ParsoTokenizeTest(UnitTest): diff --git a/libcst/_parser/parso/tests/test_tokenize.py b/libcst/_parser/parso/tests/test_tokenize.py index d0834ad7..c8180047 100644 --- a/libcst/_parser/parso/tests/test_tokenize.py +++ b/libcst/_parser/parso/tests/test_tokenize.py @@ -20,7 +20,7 @@ from textwrap import dedent from libcst._parser.parso.python.token import PythonTokenTypes from libcst._parser.parso.python.tokenize import PythonToken, tokenize from libcst._parser.parso.utils import parse_version_string, split_lines -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest # To make it easier to access some of the token types, just put them here. 
NAME = PythonTokenTypes.NAME diff --git a/libcst/_parser/parso/tests/test_utils.py b/libcst/_parser/parso/tests/test_utils.py index 17bdb8a0..e033fe9d 100644 --- a/libcst/_parser/parso/tests/test_utils.py +++ b/libcst/_parser/parso/tests/test_utils.py @@ -14,7 +14,7 @@ # - Remove grammar-specific tests # pyre-unsafe from libcst._parser.parso.utils import python_bytes_to_unicode, split_lines -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ParsoUtilsTest(UnitTest): diff --git a/libcst/_parser/py_whitespace_parser.py b/libcst/_parser/py_whitespace_parser.py index 6eabc8ea..25b4c984 100644 --- a/libcst/_parser/py_whitespace_parser.py +++ b/libcst/_parser/py_whitespace_parser.py @@ -6,13 +6,13 @@ from typing import List, Optional, Sequence, Tuple, Union from libcst._nodes.whitespace import ( - COMMENT_RE, - NEWLINE_RE, - SIMPLE_WHITESPACE_RE, Comment, + COMMENT_RE, EmptyLine, Newline, + NEWLINE_RE, ParenthesizedWhitespace, + SIMPLE_WHITESPACE_RE, SimpleWhitespace, TrailingWhitespace, ) diff --git a/libcst/_parser/tests/test_detect_config.py b/libcst/_parser/tests/test_detect_config.py index fdda965b..69300d01 100644 --- a/libcst/_parser/tests/test_detect_config.py +++ b/libcst/_parser/tests/test_detect_config.py @@ -8,11 +8,11 @@ from typing import Union from libcst._parser.detect_config import detect_config from libcst._parser.parso.utils import PythonVersionInfo from libcst._parser.types.config import ( + parser_config_asdict, ParserConfig, PartialParserConfig, - parser_config_asdict, ) -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class TestDetectConfig(UnitTest): diff --git a/libcst/_parser/tests/test_footer_behavior.py b/libcst/_parser/tests/test_footer_behavior.py index f3df77f8..951e07f3 100644 --- a/libcst/_parser/tests/test_footer_behavior.py +++ b/libcst/_parser/tests/test_footer_behavior.py @@ -8,7 +8,7 @@ from textwrap import dedent import libcst as cst from libcst import parse_module from libcst._nodes.deep_equals import deep_equals -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class FooterBehaviorTest(UnitTest): diff --git a/libcst/_parser/tests/test_node_identity.py b/libcst/_parser/tests/test_node_identity.py index dce157ef..6a801a75 100644 --- a/libcst/_parser/tests/test_node_identity.py +++ b/libcst/_parser/tests/test_node_identity.py @@ -6,7 +6,7 @@ from collections import Counter from textwrap import dedent import libcst as cst -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class DuplicateLeafNodeTest(UnitTest): diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index 17bf3581..9e01f1fe 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -9,7 +9,7 @@ from typing import Callable import libcst as cst from libcst._parser.entrypoints import is_native -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ParseErrorsTest(UnitTest): diff --git a/libcst/_parser/tests/test_version_compare.py b/libcst/_parser/tests/test_version_compare.py index efef5a25..5e3dacbf 100644 --- a/libcst/_parser/tests/test_version_compare.py +++ b/libcst/_parser/tests/test_version_compare.py @@ -5,7 +5,7 @@ from libcst._parser.grammar import _should_include from 
libcst._parser.parso.utils import PythonVersionInfo -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class VersionCompareTest(UnitTest): diff --git a/libcst/_parser/tests/test_whitespace_parser.py b/libcst/_parser/tests/test_whitespace_parser.py index 22be2412..2ab0206d 100644 --- a/libcst/_parser/tests/test_whitespace_parser.py +++ b/libcst/_parser/tests/test_whitespace_parser.py @@ -14,7 +14,7 @@ from libcst._parser.whitespace_parser import ( parse_simple_whitespace, parse_trailing_whitespace, ) -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest _T = TypeVar("_T") diff --git a/libcst/_parser/tests/test_wrapped_tokenize.py b/libcst/_parser/tests/test_wrapped_tokenize.py index ead00432..75369914 100644 --- a/libcst/_parser/tests/test_wrapped_tokenize.py +++ b/libcst/_parser/tests/test_wrapped_tokenize.py @@ -8,10 +8,10 @@ from typing import Sequence from libcst._exceptions import ParserSyntaxError from libcst._parser.parso.python.token import PythonTokenTypes -from libcst._parser.parso.utils import PythonVersionInfo, parse_version_string +from libcst._parser.parso.utils import parse_version_string, PythonVersionInfo from libcst._parser.types.whitespace_state import WhitespaceState from libcst._parser.wrapped_tokenize import Token, tokenize -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest _PY38 = parse_version_string("3.8.0") _PY37 = parse_version_string("3.7.0") diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index bf244f1c..df133675 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -12,7 +12,7 @@ from typing import Any, Callable, FrozenSet, List, Mapping, Optional, Pattern, U from libcst._add_slots import add_slots from libcst._nodes.whitespace import NEWLINE_RE -from libcst._parser.parso.utils import PythonVersionInfo, parse_version_string +from libcst._parser.parso.utils import parse_version_string, PythonVersionInfo _INDENT_RE: Pattern[str] = re.compile(r"[ \t]+") diff --git a/libcst/_parser/types/tests/test_config.py b/libcst/_parser/types/tests/test_config.py index 8b68bd18..98b81b9b 100644 --- a/libcst/_parser/types/tests/test_config.py +++ b/libcst/_parser/types/tests/test_config.py @@ -6,7 +6,7 @@ from typing import Callable from libcst._parser.types.config import PartialParserConfig -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class TestConfig(UnitTest): diff --git a/libcst/_position.py b/libcst/_position.py index be99d4d3..6eb2d68d 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -13,7 +13,7 @@ locations. """ from dataclasses import dataclass -from typing import Tuple, Union, cast, overload +from typing import cast, overload, Tuple, Union from libcst._add_slots import add_slots diff --git a/libcst/_removal_sentinel.py b/libcst/_removal_sentinel.py index b8ba8498..581fc6a0 100644 --- a/libcst/_removal_sentinel.py +++ b/libcst/_removal_sentinel.py @@ -8,7 +8,7 @@ Used by visitors. This is hoisted into a separate module to avoid some circular dependencies in the definition of CSTNode. 
""" -from enum import Enum, auto +from enum import auto, Enum class RemovalSentinel(Enum): diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index f536ca3a..e9275bac 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -5,7 +5,7 @@ # This file was generated by libcst.codegen.gen_matcher_classes -from typing import Optional, Union, TYPE_CHECKING +from typing import Optional, TYPE_CHECKING, Union from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel @@ -28,10 +28,10 @@ if TYPE_CHECKING: BinaryOperation, BooleanOperation, Call, - CompFor, - CompIf, Comparison, ComparisonTarget, + CompFor, + CompIf, ConcatenatedString, Dict, DictComp, @@ -57,9 +57,9 @@ if TYPE_CHECKING: Name, NamedExpr, Param, + Parameters, ParamSlash, ParamStar, - Parameters, RightCurlyBrace, RightParen, RightSquareBracket, diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index 41b115c1..25c3ee15 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast +from typing import Any, Callable, cast, TYPE_CHECKING, TypeVar if TYPE_CHECKING: from libcst._typed_visitor import CSTTypedBaseFunctions # noqa: F401 diff --git a/libcst/codemod/__init__.py b/libcst/codemod/__init__.py index 8aec421a..c072d69a 100644 --- a/libcst/codemod/__init__.py +++ b/libcst/codemod/__init__.py @@ -4,11 +4,11 @@ # LICENSE file in the root directory of this source tree. # from libcst.codemod._cli import ( - ParallelTransformResult, diff_code, exec_transform_with_prettyprint, gather_files, parallel_exec_transform_with_prettyprint, + ParallelTransformResult, ) from libcst.codemod._codemod import Codemod from libcst.codemod._command import ( @@ -20,12 +20,12 @@ from libcst.codemod._context import CodemodContext from libcst.codemod._runner import ( SkipFile, SkipReason, + transform_module, TransformExit, TransformFailure, TransformResult, TransformSkip, TransformSuccess, - transform_module, ) from libcst.codemod._testing import CodemodTest from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 1dff878f..f944de51 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -15,22 +15,22 @@ import sys import time import traceback from dataclasses import dataclass, replace -from multiprocessing import Pool, cpu_count +from multiprocessing import cpu_count, Pool from pathlib import Path, PurePath -from typing import Any, AnyStr, Dict, List, Optional, Sequence, Union, cast +from typing import Any, AnyStr, cast, Dict, List, Optional, Sequence, Union -from libcst import PartialParserConfig, parse_module +from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod from libcst.codemod._dummy_pool import DummyPool from libcst.codemod._runner import ( SkipFile, SkipReason, + transform_module, TransformExit, TransformFailure, TransformResult, TransformSkip, TransformSuccess, - transform_module, ) from libcst.metadata import FullRepoManager diff --git a/libcst/codemod/_runner.py b/libcst/codemod/_runner.py index 1748bd9e..fe3b8e91 100644 --- a/libcst/codemod/_runner.py +++ b/libcst/codemod/_runner.py @@ -13,7 +13,7 @@ from dataclasses import dataclass from enum import Enum from typing import Optional, Sequence, 
Union -from libcst import PartialParserConfig, parse_module +from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod # All datastructures defined in this class are pickleable so that they can be used diff --git a/libcst/codemod/_testing.py b/libcst/codemod/_testing.py index 75895b10..1152ad3c 100644 --- a/libcst/codemod/_testing.py +++ b/libcst/codemod/_testing.py @@ -6,7 +6,7 @@ from textwrap import dedent from typing import Optional, Sequence, Type -from libcst import PartialParserConfig, parse_module +from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod from libcst.codemod._context import CodemodContext from libcst.codemod._runner import SkipFile diff --git a/libcst/codemod/tests/test_cli.py b/libcst/codemod/tests/test_cli.py index 99b4e09a..45a6924c 100644 --- a/libcst/codemod/tests/test_cli.py +++ b/libcst/codemod/tests/test_cli.py @@ -6,7 +6,7 @@ from typing import Optional from libcst.codemod._cli import _calculate_module -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class TestPackageCalculation(UnitTest): diff --git a/libcst/codemod/tests/test_runner.py b/libcst/codemod/tests/test_runner.py index 07673055..9eda2ef9 100644 --- a/libcst/codemod/tests/test_runner.py +++ b/libcst/codemod/tests/test_runner.py @@ -12,11 +12,11 @@ from libcst.codemod import ( CodemodContext, CodemodTest, SkipFile, + transform_module, TransformExit, TransformFailure, TransformSkip, TransformSuccess, - transform_module, ) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index db9df20c..8d6181ac 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -13,7 +13,7 @@ from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.helpers import get_full_name_for_node -from libcst.metadata import QualifiedNameProvider, PositionProvider +from libcst.metadata import PositionProvider, QualifiedNameProvider NameOrAttribute = Union[cst.Name, cst.Attribute] diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py index 244646f9..81e8488f 100644 --- a/libcst/codemod/visitors/_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-from typing import Collection, List, Set, Union, cast +from typing import cast, Collection, List, Set, Union import libcst as cst import libcst.matchers as m diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 29652aa9..7514d073 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -12,8 +12,8 @@ from typing import Dict, Type from libcst import parse_module from libcst.codemod import Codemod, CodemodContext, CodemodTest from libcst.codemod.visitors._apply_type_annotations import ( - ApplyTypeAnnotationsVisitor, AnnotationCounts, + ApplyTypeAnnotationsVisitor, ) from libcst.testing.utils import data_provider diff --git a/libcst/helpers/tests/test_expression.py b/libcst/helpers/tests/test_expression.py index f80e6082..af9f820c 100644 --- a/libcst/helpers/tests/test_expression.py +++ b/libcst/helpers/tests/test_expression.py @@ -12,7 +12,7 @@ from libcst.helpers import ( get_full_name_for_node, get_full_name_for_node_or_raise, ) -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ExpressionTest(UnitTest): diff --git a/libcst/helpers/tests/test_statement.py b/libcst/helpers/tests/test_statement.py index 4d2009d1..9db114c0 100644 --- a/libcst/helpers/tests/test_statement.py +++ b/libcst/helpers/tests/test_statement.py @@ -11,7 +11,7 @@ from libcst.helpers import ( get_absolute_module_for_import, get_absolute_module_for_import_or_raise, ) -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class StatementTest(UnitTest): diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index d00e8f42..27eab2ab 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -11,30 +11,31 @@ from typing import Optional, Sequence, Union from typing_extensions import Literal import libcst as cst -from libcst.matchers._decorators import call_if_inside, call_if_not_inside, visit, leave +from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit + from libcst.matchers._matcher_base import ( AbstractBaseMatcherNodeMeta, - BaseMatcherNode, - DoNotCareSentinel, - DoNotCare, - TypeOf, - OneOf, AllOf, - DoesNotMatch, - MatchIfTrue, - MatchRegex, - MatchMetadata, - MatchMetadataIfTrue, - ZeroOrMore, AtLeastN, - ZeroOrOne, AtMostN, - SaveMatchedNode, + BaseMatcherNode, + DoesNotMatch, + DoNotCare, + DoNotCareSentinel, extract, extractall, findall, matches, + MatchIfTrue, + MatchMetadata, + MatchMetadataIfTrue, + MatchRegex, + OneOf, replace, + SaveMatchedNode, + TypeOf, + ZeroOrMore, + ZeroOrOne, ) from libcst.matchers._visitors import ( MatchDecoratorMismatch, diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 28f5c5e9..58b352be 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -9,9 +9,10 @@ import inspect import re from abc import ABCMeta from dataclasses import dataclass, fields -from enum import Enum, auto +from enum import auto, Enum from typing import ( Callable, + cast, Dict, Generic, Iterator, @@ -25,7 +26,6 @@ from typing import ( Type, TypeVar, Union, - cast, ) import libcst diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 0a9152fe..5aae0292 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ 
-23,10 +23,10 @@ from libcst._nodes.expression import ( BinaryOperation, BooleanOperation, Call, - CompFor, - CompIf, Comparison, ComparisonTarget, + CompFor, + CompIf, ConcatenatedString, Dict, DictComp, @@ -52,9 +52,9 @@ from libcst._nodes.expression import ( Name, NamedExpr, Param, + Parameters, ParamSlash, ParamStar, - Parameters, RightCurlyBrace, RightParen, RightSquareBracket, @@ -71,6 +71,7 @@ from libcst._nodes.expression import ( Yield, ) from libcst._nodes.module import Module + from libcst._nodes.op import ( Add, AddAssign, diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 83f07739..5e53d379 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -7,7 +7,9 @@ from inspect import ismethod, signature from typing import ( Any, Callable, + cast, Dict, + get_type_hints, List, Optional, Sequence, @@ -15,8 +17,6 @@ from typing import ( Tuple, Type, Union, - cast, - get_type_hints, ) import libcst as cst @@ -33,14 +33,14 @@ from libcst.matchers._matcher_base import ( AtLeastN, AtMostN, BaseMatcherNode, - MatchIfTrue, - MatchMetadata, - MatchMetadataIfTrue, - OneOf, extract, extractall, findall, matches, + MatchIfTrue, + MatchMetadata, + MatchMetadataIfTrue, + OneOf, replace, ) from libcst.matchers._return_types import TYPED_FUNCTION_RETURN_MAPPING diff --git a/libcst/matchers/tests/test_decorators.py b/libcst/matchers/tests/test_decorators.py index b1ff3d05..196e109a 100644 --- a/libcst/matchers/tests/test_decorators.py +++ b/libcst/matchers/tests/test_decorators.py @@ -11,11 +11,11 @@ from unittest.mock import Mock import libcst as cst import libcst.matchers as m from libcst.matchers import ( - MatcherDecoratableTransformer, - MatcherDecoratableVisitor, call_if_inside, call_if_not_inside, leave, + MatcherDecoratableTransformer, + MatcherDecoratableVisitor, visit, ) from libcst.testing.utils import UnitTest diff --git a/libcst/matchers/tests/test_visitors.py b/libcst/matchers/tests/test_visitors.py index 3fc2c658..8a9afd69 100644 --- a/libcst/matchers/tests/test_visitors.py +++ b/libcst/matchers/tests/test_visitors.py @@ -9,10 +9,10 @@ from typing import Union import libcst as cst import libcst.matchers as m from libcst.matchers import ( + leave, MatchDecoratorMismatch, MatcherDecoratableTransformer, MatcherDecoratableVisitor, - leave, visit, ) from libcst.testing.utils import UnitTest diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index 24949b53..f2c99495 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -6,16 +6,16 @@ from pathlib import Path from types import MappingProxyType from typing import ( - TYPE_CHECKING, Callable, + cast, Generic, List, Mapping, MutableMapping, Optional, Type, + TYPE_CHECKING, TypeVar, - cast, ) from libcst._batched_visitor import BatchableCSTVisitor @@ -28,7 +28,7 @@ from libcst._visitors import CSTVisitor if TYPE_CHECKING: from libcst._nodes.base import CSTNode - from libcst._nodes.module import Module, _ModuleSelfT as _ModuleT + from libcst._nodes.module import _ModuleSelfT as _ModuleT, Module from libcst.metadata.wrapper import MetadataWrapper diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index d9dffa12..8eda53c8 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. 
-from enum import Enum, auto +from enum import auto, Enum from typing import Optional, Sequence import libcst as cst diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index cec74e7f..f9ba1406 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -5,7 +5,7 @@ from pathlib import Path -from typing import TYPE_CHECKING, Collection, Dict, List, Mapping +from typing import Collection, Dict, List, Mapping, TYPE_CHECKING import libcst as cst from libcst.metadata.wrapper import MetadataWrapper diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 0c7df8d8..a8989992 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -9,7 +9,7 @@ import builtins from collections import defaultdict from contextlib import contextmanager from dataclasses import dataclass -from enum import Enum, auto +from enum import auto, Enum from typing import ( Collection, Dict, diff --git a/libcst/metadata/tests/test_expression_context_provider.py b/libcst/metadata/tests/test_expression_context_provider.py index 91008df6..a0292872 100644 --- a/libcst/metadata/tests/test_expression_context_provider.py +++ b/libcst/metadata/tests/test_expression_context_provider.py @@ -5,7 +5,7 @@ from textwrap import dedent -from typing import Dict, Optional, cast +from typing import cast, Dict, Optional import libcst as cst from libcst import parse_module diff --git a/libcst/metadata/tests/test_parent_node_provider.py b/libcst/metadata/tests/test_parent_node_provider.py index f5f617f6..00b54cdc 100644 --- a/libcst/metadata/tests/test_parent_node_provider.py +++ b/libcst/metadata/tests/test_parent_node_provider.py @@ -8,7 +8,7 @@ from textwrap import dedent import libcst as cst from libcst.metadata import MetadataWrapper, ParentNodeProvider -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class DependentVisitor(cst.CSTVisitor): diff --git a/libcst/metadata/tests/test_reentrant_codegen.py b/libcst/metadata/tests/test_reentrant_codegen.py index 8e6abeab..fbb686e7 100644 --- a/libcst/metadata/tests/test_reentrant_codegen.py +++ b/libcst/metadata/tests/test_reentrant_codegen.py @@ -9,7 +9,7 @@ from typing import Callable import libcst as cst from libcst.metadata import ExperimentalReentrantCodegenProvider, MetadataWrapper -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ExperimentalReentrantCodegenProviderTest(UnitTest): diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 4f84f439..e61f43b8 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -6,29 +6,29 @@ import sys from textwrap import dedent -from typing import Mapping, Tuple, cast, Sequence +from typing import cast, Mapping, Sequence, Tuple from unittest import mock import libcst as cst from libcst import ensure_type from libcst.metadata import MetadataWrapper from libcst.metadata.scope_provider import ( + _gen_dotted_names, Assignment, - ImportAssignment, BuiltinAssignment, BuiltinScope, ClassScope, ComprehensionScope, FunctionScope, GlobalScope, + ImportAssignment, LocalScope, QualifiedName, QualifiedNameSource, Scope, ScopeProvider, - _gen_dotted_names, ) -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class 
DependentVisitor(cst.CSTVisitor): diff --git a/libcst/metadata/tests/test_span_provider.py b/libcst/metadata/tests/test_span_provider.py index 600b8820..9626894a 100644 --- a/libcst/metadata/tests/test_span_provider.py +++ b/libcst/metadata/tests/test_span_provider.py @@ -6,10 +6,10 @@ import libcst as cst from libcst.metadata.span_provider import ( + byte_length_in_utf8, ByteSpanPositionProvider, CodeSpan, SpanProvidingCodegenState, - byte_length_in_utf8, ) from libcst.testing.utils import UnitTest diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index 164367b9..383fa3d8 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -14,7 +14,7 @@ from unittest import skipIf import libcst as cst from libcst import MetadataWrapper from libcst.metadata.type_inference_provider import PyreData, TypeInferenceProvider -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest from libcst.tests.test_pyre_integration import TEST_SUITE_PATH diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index cbba22f5..828dcfc1 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -8,8 +8,8 @@ import textwrap from contextlib import ExitStack from types import MappingProxyType from typing import ( - TYPE_CHECKING, Any, + cast, Collection, Iterable, Mapping, @@ -17,11 +17,11 @@ from typing import ( MutableSet, Optional, Type, + TYPE_CHECKING, TypeVar, - cast, ) -from libcst._batched_visitor import BatchableCSTVisitor, VisitorMethod, visit_batched +from libcst._batched_visitor import BatchableCSTVisitor, visit_batched, VisitorMethod from libcst._exceptions import MetadataException from libcst.metadata.base_provider import BatchableMetadataProvider diff --git a/libcst/tests/test_deep_clone.py b/libcst/tests/test_deep_clone.py index 6df62d40..eaf56adf 100644 --- a/libcst/tests/test_deep_clone.py +++ b/libcst/tests/test_deep_clone.py @@ -6,7 +6,7 @@ from textwrap import dedent from typing import Set import libcst as cst -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class DeepCloneTest(UnitTest): diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py index 1b836eab..ecdc23ba 100644 --- a/libcst/tests/test_e2e.py +++ b/libcst/tests/test_e2e.py @@ -5,12 +5,12 @@ from tempfile import TemporaryDirectory from typing import Generator from unittest import TestCase -from libcst import BaseExpression, Call, Name, matchers as m +from libcst import BaseExpression, Call, matchers as m, Name from libcst.codemod import ( CodemodContext, - VisitorBasedCodemodCommand, gather_files, parallel_exec_transform_with_prettyprint, + VisitorBasedCodemodCommand, ) from libcst.codemod.visitors import AddImportsVisitor diff --git a/libcst/tests/test_exceptions.py b/libcst/tests/test_exceptions.py index a1c7d842..4ca88e5a 100644 --- a/libcst/tests/test_exceptions.py +++ b/libcst/tests/test_exceptions.py @@ -8,7 +8,7 @@ import pickle from textwrap import dedent import libcst as cst -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ExceptionsTest(UnitTest): diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index e42e08e2..9d4b3f20 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py 
@@ -12,11 +12,11 @@ from typing import Dict, List, Mapping, Optional, Tuple, Union import libcst as cst from libcst.metadata import MetadataWrapper, PositionProvider from libcst.metadata.type_inference_provider import ( - PyreData, _process_pyre_data, + PyreData, run_command, ) -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest TEST_SUITE_PATH: Path = Path(__file__).parent / "pyre" diff --git a/libcst/tests/test_tabs.py b/libcst/tests/test_tabs.py index f72824c5..f21811b2 100644 --- a/libcst/tests/test_tabs.py +++ b/libcst/tests/test_tabs.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. from libcst._tabs import expand_tabs -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest class ExpandTabsTest(UnitTest): diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index aa417156..16955241 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -5,7 +5,6 @@ from types import MappingProxyType from typing import ( - TYPE_CHECKING, Any, AsyncGenerator, ClassVar, @@ -20,13 +19,14 @@ from typing import ( Set, Tuple, Type, + TYPE_CHECKING, Union, ) from typing_extensions import Literal from libcst._type_enforce import is_value_of_type -from libcst.testing.utils import UnitTest, data_provider +from libcst.testing.utils import data_provider, UnitTest if TYPE_CHECKING: from collections import Counter # noqa: F401 diff --git a/libcst/tool.py b/libcst/tool.py index e7612bd4..cb907764 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -23,12 +23,12 @@ from typing import Any, Callable, Dict, List, Sequence, Tuple, Type import yaml from libcst import ( - LIBCST_VERSION, CSTNode, IndentedBlock, + LIBCST_VERSION, Module, - PartialParserConfig, parse_module, + PartialParserConfig, ) from libcst._nodes.deep_equals import deep_equals from libcst.codemod import ( diff --git a/requirements-dev.txt b/requirements-dev.txt index 9223d3a3..3c4afca7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,5 +14,5 @@ setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 tox>=3.18.1 ufmt==1.3 -usort==0.6.3 +usort==1.0.0rc1 setuptools-rust>=0.12.1 diff --git a/setup.py b/setup.py index 4e35ca68..5558798f 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. 
-from os import path, environ +from os import environ, path import setuptools from setuptools_rust import Binding, RustExtension From 1d1b7da05fc68dd59a3cf9d4f9adb2079375674e Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 22 Dec 2021 13:56:12 -0700 Subject: [PATCH 164/632] .gitignore shared-object built artifacts (#567) --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index dbe480d7..dc2a1b30 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.swo *.pyc *.pyo +*.so *.egg-info/ .eggs/ .pyre/ From 9c13ca5f9cf7d4ebcee553fa7e9572d3171c7153 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 23 Dec 2021 16:51:41 +0000 Subject: [PATCH 165/632] pin checkout action to v1 (#569) --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ccdb9171..9ea35898 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -203,7 +203,7 @@ jobs: CIBW_ARCHS: ${{ matrix.vers }} CIBW_BUILD_VERBOSITY: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v1 - uses: actions/setup-python@v2 with: python-version: 3.9 From c44ff0500b52dd78716c36c7d1efd3016e20005f Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 28 Dec 2021 11:55:18 +0000 Subject: [PATCH 166/632] Fix license headers (#560) * Facebook -> Meta * remove year from doc copyright --- LICENSE | 2 +- docs/source/_static/custom.css | 2 +- docs/source/conf.py | 4 ++-- libcst/__init__.py | 2 +- libcst/_batched_visitor.py | 2 +- libcst/_exceptions.py | 2 +- libcst/_flatten_sentinel.py | 2 +- libcst/_maybe_sentinel.py | 2 +- libcst/_metadata_dependent.py | 2 +- libcst/_nodes/__init__.py | 2 +- libcst/_nodes/base.py | 2 +- libcst/_nodes/deep_equals.py | 2 +- libcst/_nodes/expression.py | 2 +- libcst/_nodes/internal.py | 2 +- libcst/_nodes/module.py | 2 +- libcst/_nodes/op.py | 2 +- libcst/_nodes/statement.py | 2 +- libcst/_nodes/tests/__init__.py | 2 +- libcst/_nodes/tests/base.py | 2 +- libcst/_nodes/tests/test_assert.py | 2 +- libcst/_nodes/tests/test_assign.py | 2 +- libcst/_nodes/tests/test_atom.py | 2 +- libcst/_nodes/tests/test_attribute.py | 2 +- libcst/_nodes/tests/test_await.py | 2 +- libcst/_nodes/tests/test_binary_op.py | 2 +- libcst/_nodes/tests/test_boolean_op.py | 2 +- libcst/_nodes/tests/test_call.py | 2 +- libcst/_nodes/tests/test_classdef.py | 2 +- libcst/_nodes/tests/test_comment.py | 2 +- libcst/_nodes/tests/test_comparison.py | 2 +- libcst/_nodes/tests/test_cst_node.py | 2 +- libcst/_nodes/tests/test_del.py | 2 +- libcst/_nodes/tests/test_dict.py | 2 +- libcst/_nodes/tests/test_dict_comp.py | 2 +- libcst/_nodes/tests/test_docstring.py | 2 +- libcst/_nodes/tests/test_else.py | 2 +- libcst/_nodes/tests/test_empty_line.py | 2 +- libcst/_nodes/tests/test_flatten_behavior.py | 2 +- libcst/_nodes/tests/test_for.py | 2 +- libcst/_nodes/tests/test_funcdef.py | 2 +- libcst/_nodes/tests/test_global.py | 2 +- libcst/_nodes/tests/test_if.py | 2 +- libcst/_nodes/tests/test_ifexp.py | 2 +- libcst/_nodes/tests/test_import.py | 2 +- libcst/_nodes/tests/test_indented_block.py | 2 +- libcst/_nodes/tests/test_lambda.py | 2 +- libcst/_nodes/tests/test_leaf_small_statements.py | 2 +- libcst/_nodes/tests/test_list.py | 2 +- libcst/_nodes/tests/test_matrix_multiply.py | 2 +- libcst/_nodes/tests/test_module.py | 2 +- libcst/_nodes/tests/test_namedexpr.py | 2 +- libcst/_nodes/tests/test_newline.py | 2 +- libcst/_nodes/tests/test_nonlocal.py | 2 +- 
libcst/_nodes/tests/test_number.py | 2 +- libcst/_nodes/tests/test_raise.py | 2 +- libcst/_nodes/tests/test_removal_behavior.py | 2 +- libcst/_nodes/tests/test_return.py | 2 +- libcst/_nodes/tests/test_set.py | 2 +- libcst/_nodes/tests/test_simple_comp.py | 2 +- libcst/_nodes/tests/test_simple_statement.py | 2 +- libcst/_nodes/tests/test_simple_whitespace.py | 2 +- libcst/_nodes/tests/test_small_statement.py | 2 +- libcst/_nodes/tests/test_subscript.py | 2 +- libcst/_nodes/tests/test_trailing_whitespace.py | 2 +- libcst/_nodes/tests/test_try.py | 2 +- libcst/_nodes/tests/test_tuple.py | 2 +- libcst/_nodes/tests/test_unary_op.py | 2 +- libcst/_nodes/tests/test_while.py | 2 +- libcst/_nodes/tests/test_with.py | 2 +- libcst/_nodes/tests/test_yield.py | 2 +- libcst/_nodes/whitespace.py | 2 +- libcst/_parser/__init__.py | 2 +- libcst/_parser/conversions/__init__.py | 2 +- libcst/_parser/conversions/expression.py | 2 +- libcst/_parser/conversions/module.py | 2 +- libcst/_parser/conversions/params.py | 2 +- libcst/_parser/conversions/statement.py | 2 +- libcst/_parser/conversions/terminals.py | 2 +- libcst/_parser/custom_itertools.py | 2 +- libcst/_parser/detect_config.py | 2 +- libcst/_parser/entrypoints.py | 2 +- libcst/_parser/grammar.py | 2 +- libcst/_parser/parso/__init__.py | 2 +- libcst/_parser/parso/pgen2/__init__.py | 2 +- libcst/_parser/parso/python/__init__.py | 2 +- libcst/_parser/parso/python/token.py | 2 +- libcst/_parser/parso/tests/__init__.py | 2 +- libcst/_parser/production_decorator.py | 2 +- libcst/_parser/py_whitespace_parser.py | 2 +- libcst/_parser/python_parser.py | 2 +- libcst/_parser/tests/__init__.py | 2 +- libcst/_parser/tests/test_config.py | 2 +- libcst/_parser/tests/test_detect_config.py | 2 +- libcst/_parser/tests/test_footer_behavior.py | 2 +- libcst/_parser/tests/test_node_identity.py | 2 +- libcst/_parser/tests/test_parse_errors.py | 2 +- libcst/_parser/tests/test_version_compare.py | 2 +- libcst/_parser/tests/test_whitespace_parser.py | 2 +- libcst/_parser/tests/test_wrapped_tokenize.py | 2 +- libcst/_parser/types/__init__.py | 2 +- libcst/_parser/types/config.py | 2 +- libcst/_parser/types/conversions.py | 2 +- libcst/_parser/types/partials.py | 2 +- libcst/_parser/types/production.py | 2 +- libcst/_parser/types/py_config.py | 2 +- libcst/_parser/types/py_token.py | 2 +- libcst/_parser/types/py_whitespace_state.py | 2 +- libcst/_parser/types/tests/__init__.py | 2 +- libcst/_parser/types/tests/test_config.py | 2 +- libcst/_parser/types/token.py | 2 +- libcst/_parser/types/whitespace_state.py | 2 +- libcst/_parser/whitespace_parser.py | 2 +- libcst/_parser/wrapped_tokenize.py | 2 +- libcst/_position.py | 2 +- libcst/_removal_sentinel.py | 2 +- libcst/_tabs.py | 2 +- libcst/_type_enforce.py | 2 +- libcst/_typed_visitor.py | 2 +- libcst/_typed_visitor_base.py | 2 +- libcst/_types.py | 2 +- libcst/_visitors.py | 2 +- libcst/codegen/__init__.py | 2 +- libcst/codegen/gather.py | 2 +- libcst/codegen/gen_matcher_classes.py | 4 ++-- libcst/codegen/gen_type_mapping.py | 4 ++-- libcst/codegen/gen_visitor_functions.py | 4 ++-- libcst/codegen/generate.py | 2 +- libcst/codegen/tests/__init__.py | 2 +- libcst/codegen/tests/test_codegen_clean.py | 2 +- libcst/codegen/transforms.py | 2 +- libcst/codemod/__init__.py | 2 +- libcst/codemod/_cli.py | 2 +- libcst/codemod/_codemod.py | 2 +- libcst/codemod/_command.py | 2 +- libcst/codemod/_context.py | 2 +- libcst/codemod/_dummy_pool.py | 2 +- libcst/codemod/_runner.py | 2 +- libcst/codemod/_testing.py | 2 +- 
libcst/codemod/_visitor.py | 2 +- libcst/codemod/commands/__init__.py | 2 +- libcst/codemod/commands/add_pyre_directive.py | 2 +- libcst/codemod/commands/convert_format_to_fstring.py | 2 +- libcst/codemod/commands/convert_namedtuple_to_dataclass.py | 2 +- .../codemod/commands/convert_percent_format_to_fstring.py | 2 +- libcst/codemod/commands/ensure_import_present.py | 2 +- libcst/codemod/commands/fix_pyre_directives.py | 2 +- libcst/codemod/commands/noop.py | 2 +- libcst/codemod/commands/remove_pyre_directive.py | 2 +- libcst/codemod/commands/remove_unused_imports.py | 2 +- libcst/codemod/commands/rename.py | 2 +- libcst/codemod/commands/strip_strings_from_types.py | 2 +- libcst/codemod/commands/tests/__init__.py | 2 +- libcst/codemod/commands/tests/test_add_pyre_directive.py | 2 +- .../commands/tests/test_convert_format_to_fstring.py | 2 +- .../commands/tests/test_convert_namedtuple_to_dataclass.py | 2 +- .../tests/test_convert_percent_format_to_fstring.py | 2 +- libcst/codemod/commands/tests/test_ensure_import_present.py | 2 +- libcst/codemod/commands/tests/test_fix_pyre_directives.py | 2 +- libcst/codemod/commands/tests/test_noop.py | 2 +- libcst/codemod/commands/tests/test_remove_pyre_directive.py | 2 +- libcst/codemod/commands/tests/test_remove_unused_imports.py | 2 +- libcst/codemod/commands/tests/test_rename.py | 2 +- .../codemod/commands/tests/test_strip_strings_from_types.py | 2 +- .../commands/tests/test_unnecessary_format_string.py | 2 +- libcst/codemod/commands/unnecessary_format_string.py | 2 +- libcst/codemod/tests/__init__.py | 2 +- libcst/codemod/tests/codemod_formatter_error_input.py.txt | 2 +- libcst/codemod/tests/test_cli.py | 2 +- libcst/codemod/tests/test_codemod.py | 2 +- libcst/codemod/tests/test_codemod_cli.py | 2 +- libcst/codemod/tests/test_metadata.py | 2 +- libcst/codemod/tests/test_runner.py | 2 +- libcst/codemod/visitors/__init__.py | 2 +- libcst/codemod/visitors/_add_imports.py | 2 +- libcst/codemod/visitors/_apply_type_annotations.py | 2 +- libcst/codemod/visitors/_gather_comments.py | 2 +- libcst/codemod/visitors/_gather_exports.py | 2 +- libcst/codemod/visitors/_gather_imports.py | 2 +- libcst/codemod/visitors/_gather_string_annotation_names.py | 2 +- libcst/codemod/visitors/_gather_unused_imports.py | 2 +- libcst/codemod/visitors/_remove_imports.py | 2 +- libcst/codemod/visitors/tests/__init__.py | 2 +- libcst/codemod/visitors/tests/test_add_imports.py | 2 +- .../codemod/visitors/tests/test_apply_type_annotations.py | 2 +- libcst/codemod/visitors/tests/test_gather_comments.py | 2 +- libcst/codemod/visitors/tests/test_gather_exports.py | 2 +- libcst/codemod/visitors/tests/test_gather_imports.py | 2 +- .../visitors/tests/test_gather_string_annotation_names.py | 2 +- libcst/codemod/visitors/tests/test_gather_unused_imports.py | 2 +- libcst/codemod/visitors/tests/test_remove_imports.py | 2 +- libcst/helpers/__init__.py | 2 +- libcst/helpers/_statement.py | 2 +- libcst/helpers/_template.py | 2 +- libcst/helpers/common.py | 2 +- libcst/helpers/expression.py | 2 +- libcst/helpers/module.py | 2 +- libcst/helpers/tests/__init__.py | 2 +- libcst/helpers/tests/test_expression.py | 2 +- libcst/helpers/tests/test_module.py | 2 +- libcst/helpers/tests/test_statement.py | 2 +- libcst/helpers/tests/test_template.py | 2 +- libcst/matchers/__init__.py | 2 +- libcst/matchers/_decorators.py | 2 +- libcst/matchers/_matcher_base.py | 2 +- libcst/matchers/_return_types.py | 2 +- libcst/matchers/_visitors.py | 2 +- libcst/matchers/tests/__init__.py | 2 +- 
libcst/matchers/tests/test_decorators.py | 2 +- libcst/matchers/tests/test_extract.py | 2 +- libcst/matchers/tests/test_findall.py | 2 +- libcst/matchers/tests/test_matchers.py | 2 +- libcst/matchers/tests/test_matchers_with_metadata.py | 2 +- libcst/matchers/tests/test_replace.py | 2 +- libcst/matchers/tests/test_visitors.py | 2 +- libcst/metadata/__init__.py | 2 +- libcst/metadata/base_provider.py | 2 +- libcst/metadata/expression_context_provider.py | 2 +- libcst/metadata/full_repo_manager.py | 2 +- libcst/metadata/name_provider.py | 2 +- libcst/metadata/parent_node_provider.py | 2 +- libcst/metadata/position_provider.py | 2 +- libcst/metadata/reentrant_codegen.py | 2 +- libcst/metadata/scope_provider.py | 2 +- libcst/metadata/span_provider.py | 2 +- libcst/metadata/tests/__init__.py | 2 +- libcst/metadata/tests/test_base_provider.py | 2 +- libcst/metadata/tests/test_expression_context_provider.py | 2 +- libcst/metadata/tests/test_full_repo_manager.py | 2 +- libcst/metadata/tests/test_metadata_provider.py | 2 +- libcst/metadata/tests/test_metadata_wrapper.py | 2 +- libcst/metadata/tests/test_name_provider.py | 2 +- libcst/metadata/tests/test_parent_node_provider.py | 2 +- libcst/metadata/tests/test_position_provider.py | 2 +- libcst/metadata/tests/test_reentrant_codegen.py | 2 +- libcst/metadata/tests/test_scope_provider.py | 2 +- libcst/metadata/tests/test_span_provider.py | 2 +- libcst/metadata/tests/test_type_inference_provider.py | 2 +- libcst/metadata/type_inference_provider.py | 2 +- libcst/metadata/wrapper.py | 2 +- libcst/testing/__init__.py | 2 +- libcst/testing/utils.py | 2 +- libcst/tests/__init__.py | 2 +- libcst/tests/pyre/simple_class.py | 2 +- libcst/tests/test_batched_visitor.py | 2 +- libcst/tests/test_deep_clone.py | 2 +- libcst/tests/test_deep_replace.py | 2 +- libcst/tests/test_exceptions.py | 2 +- libcst/tests/test_pyre_integration.py | 2 +- libcst/tests/test_tabs.py | 2 +- libcst/tests/test_tool.py | 2 +- libcst/tests/test_type_enforce.py | 2 +- libcst/tests/test_visitor.py | 2 +- libcst/tool.py | 2 +- native/libcst/Cargo.toml | 2 +- native/libcst/benches/parser_benchmark.rs | 5 +++++ native/libcst/src/bin.rs | 5 +++++ native/libcst/src/lib.rs | 2 +- native/libcst/src/nodes/codegen.rs | 2 +- native/libcst/src/nodes/expression.rs | 2 +- native/libcst/src/nodes/inflate_helpers.rs | 5 +++++ native/libcst/src/nodes/macros.rs | 2 +- native/libcst/src/nodes/mod.rs | 5 +++++ native/libcst/src/nodes/module.rs | 2 +- native/libcst/src/nodes/op.rs | 2 +- native/libcst/src/nodes/parser_config.rs | 2 +- native/libcst/src/nodes/py_cached.rs | 2 +- native/libcst/src/nodes/statement.rs | 2 +- native/libcst/src/nodes/test_utils.rs | 2 +- native/libcst/src/nodes/traits.rs | 5 +++++ native/libcst/src/nodes/whitespace.rs | 2 +- native/libcst/src/parser/errors.rs | 5 +++++ native/libcst/src/parser/grammar.rs | 2 +- native/libcst/src/parser/mod.rs | 5 +++++ native/libcst/src/parser/numbers.rs | 5 +++++ native/libcst/src/py.rs | 5 +++++ native/libcst/src/tokenizer/core/mod.rs | 2 +- native/libcst/src/tokenizer/core/string_types.rs | 2 +- native/libcst/src/tokenizer/debug_utils.rs | 2 +- native/libcst/src/tokenizer/mod.rs | 2 +- native/libcst/src/tokenizer/operators.rs | 2 +- native/libcst/src/tokenizer/tests.rs | 2 +- native/libcst/src/tokenizer/text_position/char_width.rs | 2 +- native/libcst/src/tokenizer/text_position/mod.rs | 2 +- native/libcst/src/tokenizer/whitespace_parser.rs | 5 +++++ native/libcst/tests/parser_roundtrip.rs | 5 +++++ native/libcst_derive/src/codegen.rs | 5 +++++ 
native/libcst_derive/src/inflate.rs | 5 +++++ native/libcst_derive/src/into_py.rs | 5 +++++ native/libcst_derive/src/lib.rs | 5 +++++ native/libcst_derive/src/parenthesized_node.rs | 5 +++++ setup.py | 2 +- stubs/libcst_native/parser_config.pyi | 6 +----- stubs/libcst_native/token_type.pyi | 2 +- stubs/libcst_native/tokenize.pyi | 2 +- stubs/libcst_native/whitespace_parser.pyi | 2 +- stubs/libcst_native/whitespace_state.pyi | 3 +-- 296 files changed, 364 insertions(+), 289 deletions(-) diff --git a/LICENSE b/LICENSE index 13df011c..ad52f20b 100644 --- a/LICENSE +++ b/LICENSE @@ -26,7 +26,7 @@ These files are: MIT License -Copyright (c) Facebook, Inc. and its affiliates. +Copyright (c) Meta Platforms, Inc. and affiliates. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/docs/source/_static/custom.css b/docs/source/_static/custom.css index 80660072..5b756e8a 100644 --- a/docs/source/_static/custom.css +++ b/docs/source/_static/custom.css @@ -1,5 +1,5 @@ /** - * Copyright (c) Facebook, Inc. and its affiliates. + * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/docs/source/conf.py b/docs/source/conf.py index 22ca3e8b..2f20dcab 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -26,7 +26,7 @@ # -- Project information ----------------------------------------------------- project = "LibCST" -copyright = "2019, Facebook" +copyright = "Meta Platforms, Inc. and affiliates" author = "Benjamin Woodruff, Jennifer Taylor, Carl Meyer, Jimmy Lai, Ray Zeng" # The short X.Y version diff --git a/libcst/__init__.py b/libcst/__init__.py index 01c1850c..c51dd9bc 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_batched_visitor.py b/libcst/_batched_visitor.py index 1f2c1a45..d853738f 100644 --- a/libcst/_batched_visitor.py +++ b/libcst/_batched_visitor.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index b549602f..0ba689c2 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_flatten_sentinel.py b/libcst/_flatten_sentinel.py index 44865050..b41d66ef 100644 --- a/libcst/_flatten_sentinel.py +++ b/libcst/_flatten_sentinel.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_maybe_sentinel.py b/libcst/_maybe_sentinel.py index f3cac711..d5eaab11 100644 --- a/libcst/_maybe_sentinel.py +++ b/libcst/_maybe_sentinel.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_metadata_dependent.py b/libcst/_metadata_dependent.py index 1a1ca61f..6a768270 100644 --- a/libcst/_metadata_dependent.py +++ b/libcst/_metadata_dependent.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/__init__.py b/libcst/_nodes/__init__.py index 01f1f091..35d483bc 100644 --- a/libcst/_nodes/__init__.py +++ b/libcst/_nodes/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 21c6061c..fd86cad4 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/deep_equals.py b/libcst/_nodes/deep_equals.py index 1e18227c..de08be7f 100644 --- a/libcst/_nodes/deep_equals.py +++ b/libcst/_nodes/deep_equals.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index cd177daa..2dc7c882 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/internal.py b/libcst/_nodes/internal.py index 4b4dfcaa..35d89743 100644 --- a/libcst/_nodes/internal.py +++ b/libcst/_nodes/internal.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/module.py b/libcst/_nodes/module.py index 51643ff5..149a4375 100644 --- a/libcst/_nodes/module.py +++ b/libcst/_nodes/module.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/_nodes/op.py b/libcst/_nodes/op.py index 54fdd73c..ea02835a 100644 --- a/libcst/_nodes/op.py +++ b/libcst/_nodes/op.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 5e8068ee..bf82d8a1 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/__init__.py b/libcst/_nodes/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_nodes/tests/__init__.py +++ b/libcst/_nodes/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/base.py b/libcst/_nodes/tests/base.py index ed3b2b10..3ac22890 100644 --- a/libcst/_nodes/tests/base.py +++ b/libcst/_nodes/tests/base.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_assert.py b/libcst/_nodes/tests/test_assert.py index 4af2e53a..5d080215 100644 --- a/libcst/_nodes/tests/test_assert.py +++ b/libcst/_nodes/tests/test_assert.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_assign.py b/libcst/_nodes/tests/test_assign.py index 7b107d94..b91533b3 100644 --- a/libcst/_nodes/tests/test_assign.py +++ b/libcst/_nodes/tests/test_assign.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index d6544e2f..c2e31608 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_attribute.py b/libcst/_nodes/tests/test_attribute.py index e3c021e3..ef017a1e 100644 --- a/libcst/_nodes/tests/test_attribute.py +++ b/libcst/_nodes/tests/test_attribute.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/_nodes/tests/test_await.py b/libcst/_nodes/tests/test_await.py index aa404b44..8aa408a9 100644 --- a/libcst/_nodes/tests/test_await.py +++ b/libcst/_nodes/tests/test_await.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_binary_op.py b/libcst/_nodes/tests/test_binary_op.py index 50f8ff79..b6ac8b09 100644 --- a/libcst/_nodes/tests/test_binary_op.py +++ b/libcst/_nodes/tests/test_binary_op.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_boolean_op.py b/libcst/_nodes/tests/test_boolean_op.py index bf63a49d..efde7ce4 100644 --- a/libcst/_nodes/tests/test_boolean_op.py +++ b/libcst/_nodes/tests/test_boolean_op.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_call.py b/libcst/_nodes/tests/test_call.py index c58af996..666db00d 100644 --- a/libcst/_nodes/tests/test_call.py +++ b/libcst/_nodes/tests/test_call.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_classdef.py b/libcst/_nodes/tests/test_classdef.py index 1f0c4090..db582dce 100644 --- a/libcst/_nodes/tests/test_classdef.py +++ b/libcst/_nodes/tests/test_classdef.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_comment.py b/libcst/_nodes/tests/test_comment.py index 79393765..a2c8d2c9 100644 --- a/libcst/_nodes/tests/test_comment.py +++ b/libcst/_nodes/tests/test_comment.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_comparison.py b/libcst/_nodes/tests/test_comparison.py index b70dab1e..d4b985c7 100644 --- a/libcst/_nodes/tests/test_comparison.py +++ b/libcst/_nodes/tests/test_comparison.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_cst_node.py b/libcst/_nodes/tests/test_cst_node.py index 84611184..8cc30dc6 100644 --- a/libcst/_nodes/tests/test_cst_node.py +++ b/libcst/_nodes/tests/test_cst_node.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_del.py b/libcst/_nodes/tests/test_del.py index 83d97cb4..ecbebcb5 100644 --- a/libcst/_nodes/tests/test_del.py +++ b/libcst/_nodes/tests/test_del.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_dict.py b/libcst/_nodes/tests/test_dict.py index 430be588..1ee33332 100644 --- a/libcst/_nodes/tests/test_dict.py +++ b/libcst/_nodes/tests/test_dict.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_dict_comp.py b/libcst/_nodes/tests/test_dict_comp.py index 829cb96f..a9970f9d 100644 --- a/libcst/_nodes/tests/test_dict_comp.py +++ b/libcst/_nodes/tests/test_dict_comp.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_docstring.py b/libcst/_nodes/tests/test_docstring.py index 8bb8b555..7c87a9c0 100644 --- a/libcst/_nodes/tests/test_docstring.py +++ b/libcst/_nodes/tests/test_docstring.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_else.py b/libcst/_nodes/tests/test_else.py index ffaad752..d144ab81 100644 --- a/libcst/_nodes/tests/test_else.py +++ b/libcst/_nodes/tests/test_else.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_empty_line.py b/libcst/_nodes/tests/test_empty_line.py index ad4c647c..9e1a6f72 100644 --- a/libcst/_nodes/tests/test_empty_line.py +++ b/libcst/_nodes/tests/test_empty_line.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_flatten_behavior.py b/libcst/_nodes/tests/test_flatten_behavior.py index d373b61d..463c457f 100644 --- a/libcst/_nodes/tests/test_flatten_behavior.py +++ b/libcst/_nodes/tests/test_flatten_behavior.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_for.py b/libcst/_nodes/tests/test_for.py index 9fbc3e49..c4d1421b 100644 --- a/libcst/_nodes/tests/test_for.py +++ b/libcst/_nodes/tests/test_for.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. 
and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 4675f918..5d5a80ac 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_global.py b/libcst/_nodes/tests/test_global.py index 038c0368..da76f9a3 100644 --- a/libcst/_nodes/tests/test_global.py +++ b/libcst/_nodes/tests/test_global.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_if.py b/libcst/_nodes/tests/test_if.py index 52dffa33..e6e4c5ae 100644 --- a/libcst/_nodes/tests/test_if.py +++ b/libcst/_nodes/tests/test_if.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_ifexp.py b/libcst/_nodes/tests/test_ifexp.py index ef65b9d5..e00924b1 100644 --- a/libcst/_nodes/tests/test_ifexp.py +++ b/libcst/_nodes/tests/test_ifexp.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_import.py b/libcst/_nodes/tests/test_import.py index 0da7c38c..eedfcdaf 100644 --- a/libcst/_nodes/tests/test_import.py +++ b/libcst/_nodes/tests/test_import.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_indented_block.py b/libcst/_nodes/tests/test_indented_block.py index 4397c864..8190f2d9 100644 --- a/libcst/_nodes/tests/test_indented_block.py +++ b/libcst/_nodes/tests/test_indented_block.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_lambda.py b/libcst/_nodes/tests/test_lambda.py index 5ff75f1d..82c1b675 100644 --- a/libcst/_nodes/tests/test_lambda.py +++ b/libcst/_nodes/tests/test_lambda.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/_nodes/tests/test_leaf_small_statements.py b/libcst/_nodes/tests/test_leaf_small_statements.py index 9ab3e499..fb202002 100644 --- a/libcst/_nodes/tests/test_leaf_small_statements.py +++ b/libcst/_nodes/tests/test_leaf_small_statements.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_list.py b/libcst/_nodes/tests/test_list.py index e2f8bd18..9096ced6 100644 --- a/libcst/_nodes/tests/test_list.py +++ b/libcst/_nodes/tests/test_list.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_matrix_multiply.py b/libcst/_nodes/tests/test_matrix_multiply.py index b91147e6..5b4b8668 100644 --- a/libcst/_nodes/tests/test_matrix_multiply.py +++ b/libcst/_nodes/tests/test_matrix_multiply.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_module.py b/libcst/_nodes/tests/test_module.py index 3177c578..5b33c6b7 100644 --- a/libcst/_nodes/tests/test_module.py +++ b/libcst/_nodes/tests/test_module.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index 3949bbea..f24045ca 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_newline.py b/libcst/_nodes/tests/test_newline.py index ab5935bf..4b5e8d8b 100644 --- a/libcst/_nodes/tests/test_newline.py +++ b/libcst/_nodes/tests/test_newline.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_nonlocal.py b/libcst/_nodes/tests/test_nonlocal.py index 86b65301..62178e4a 100644 --- a/libcst/_nodes/tests/test_nonlocal.py +++ b/libcst/_nodes/tests/test_nonlocal.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_number.py b/libcst/_nodes/tests/test_number.py index 517a97c5..f1b53505 100644 --- a/libcst/_nodes/tests/test_number.py +++ b/libcst/_nodes/tests/test_number.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. 
and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_raise.py b/libcst/_nodes/tests/test_raise.py index 7113357e..4b2253fd 100644 --- a/libcst/_nodes/tests/test_raise.py +++ b/libcst/_nodes/tests/test_raise.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_removal_behavior.py b/libcst/_nodes/tests/test_removal_behavior.py index e97d3681..9b1bf619 100644 --- a/libcst/_nodes/tests/test_removal_behavior.py +++ b/libcst/_nodes/tests/test_removal_behavior.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_return.py b/libcst/_nodes/tests/test_return.py index 75aa587a..85356e19 100644 --- a/libcst/_nodes/tests/test_return.py +++ b/libcst/_nodes/tests/test_return.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_set.py b/libcst/_nodes/tests/test_set.py index 3c55268f..bdf84955 100644 --- a/libcst/_nodes/tests/test_set.py +++ b/libcst/_nodes/tests/test_set.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_simple_comp.py b/libcst/_nodes/tests/test_simple_comp.py index e7dfdc76..4de0c0a9 100644 --- a/libcst/_nodes/tests/test_simple_comp.py +++ b/libcst/_nodes/tests/test_simple_comp.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_simple_statement.py b/libcst/_nodes/tests/test_simple_statement.py index 5870e56e..847d3e0f 100644 --- a/libcst/_nodes/tests/test_simple_statement.py +++ b/libcst/_nodes/tests/test_simple_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_simple_whitespace.py b/libcst/_nodes/tests/test_simple_whitespace.py index b571d7f2..fca4139c 100644 --- a/libcst/_nodes/tests/test_simple_whitespace.py +++ b/libcst/_nodes/tests/test_simple_whitespace.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/_nodes/tests/test_small_statement.py b/libcst/_nodes/tests/test_small_statement.py index 73e5296f..5defd891 100644 --- a/libcst/_nodes/tests/test_small_statement.py +++ b/libcst/_nodes/tests/test_small_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_subscript.py b/libcst/_nodes/tests/test_subscript.py index 7e39eb5e..44ccba29 100644 --- a/libcst/_nodes/tests/test_subscript.py +++ b/libcst/_nodes/tests/test_subscript.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_trailing_whitespace.py b/libcst/_nodes/tests/test_trailing_whitespace.py index d11d553e..7b1b2587 100644 --- a/libcst/_nodes/tests/test_trailing_whitespace.py +++ b/libcst/_nodes/tests/test_trailing_whitespace.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index df4a6ae4..a0eceac9 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_tuple.py b/libcst/_nodes/tests/test_tuple.py index 6f3b9806..db9f2cce 100644 --- a/libcst/_nodes/tests/test_tuple.py +++ b/libcst/_nodes/tests/test_tuple.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_unary_op.py b/libcst/_nodes/tests/test_unary_op.py index c9dbb53b..28062d8b 100644 --- a/libcst/_nodes/tests/test_unary_op.py +++ b/libcst/_nodes/tests/test_unary_op.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_while.py b/libcst/_nodes/tests/test_while.py index 1bdc8976..adf6e17e 100644 --- a/libcst/_nodes/tests/test_while.py +++ b/libcst/_nodes/tests/test_while.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index 4da84b35..2246bc2d 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/tests/test_yield.py b/libcst/_nodes/tests/test_yield.py index 7194c04c..22a18872 100644 --- a/libcst/_nodes/tests/test_yield.py +++ b/libcst/_nodes/tests/test_yield.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_nodes/whitespace.py b/libcst/_nodes/whitespace.py index e16eea43..686c14fb 100644 --- a/libcst/_nodes/whitespace.py +++ b/libcst/_nodes/whitespace.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/__init__.py b/libcst/_parser/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/__init__.py +++ b/libcst/_parser/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/conversions/__init__.py b/libcst/_parser/conversions/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/conversions/__init__.py +++ b/libcst/_parser/conversions/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 7d68a168..53c6b0a9 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/conversions/module.py b/libcst/_parser/conversions/module.py index ad3bed66..b40641d0 100644 --- a/libcst/_parser/conversions/module.py +++ b/libcst/_parser/conversions/module.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/conversions/params.py b/libcst/_parser/conversions/params.py index 597e7ac9..aa51d681 100644 --- a/libcst/_parser/conversions/params.py +++ b/libcst/_parser/conversions/params.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index f6ac7fb6..96e0be1a 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. 
+# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/conversions/terminals.py b/libcst/_parser/conversions/terminals.py index 96d9391b..f5697229 100644 --- a/libcst/_parser/conversions/terminals.py +++ b/libcst/_parser/conversions/terminals.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/custom_itertools.py b/libcst/_parser/custom_itertools.py index 867c934e..81cfdb4b 100644 --- a/libcst/_parser/custom_itertools.py +++ b/libcst/_parser/custom_itertools.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/detect_config.py b/libcst/_parser/detect_config.py index ae3222c1..375a4f07 100644 --- a/libcst/_parser/detect_config.py +++ b/libcst/_parser/detect_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index af58301a..461433bd 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index 7e678acc..8e6ade59 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/parso/__init__.py b/libcst/_parser/parso/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/parso/__init__.py +++ b/libcst/_parser/parso/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/parso/pgen2/__init__.py b/libcst/_parser/parso/pgen2/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/parso/pgen2/__init__.py +++ b/libcst/_parser/parso/pgen2/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/parso/python/__init__.py b/libcst/_parser/parso/python/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/parso/python/__init__.py +++ b/libcst/_parser/parso/python/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. 
and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index e4798f10..de883719 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/parso/tests/__init__.py b/libcst/_parser/parso/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/parso/tests/__init__.py +++ b/libcst/_parser/parso/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/production_decorator.py b/libcst/_parser/production_decorator.py index ffa0d038..3515e0b4 100644 --- a/libcst/_parser/production_decorator.py +++ b/libcst/_parser/production_decorator.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/py_whitespace_parser.py b/libcst/_parser/py_whitespace_parser.py index 25b4c984..a3c53a97 100644 --- a/libcst/_parser/py_whitespace_parser.py +++ b/libcst/_parser/py_whitespace_parser.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/python_parser.py b/libcst/_parser/python_parser.py index 05ea0b57..7f3d53db 100644 --- a/libcst/_parser/python_parser.py +++ b/libcst/_parser/python_parser.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/__init__.py b/libcst/_parser/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/tests/__init__.py +++ b/libcst/_parser/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_config.py b/libcst/_parser/tests/test_config.py index 78692369..d9c31db5 100644 --- a/libcst/_parser/tests/test_config.py +++ b/libcst/_parser/tests/test_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/_parser/tests/test_detect_config.py b/libcst/_parser/tests/test_detect_config.py index 69300d01..6d9eaa6c 100644 --- a/libcst/_parser/tests/test_detect_config.py +++ b/libcst/_parser/tests/test_detect_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_footer_behavior.py b/libcst/_parser/tests/test_footer_behavior.py index 951e07f3..ccac8254 100644 --- a/libcst/_parser/tests/test_footer_behavior.py +++ b/libcst/_parser/tests/test_footer_behavior.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_node_identity.py b/libcst/_parser/tests/test_node_identity.py index 6a801a75..91171915 100644 --- a/libcst/_parser/tests/test_node_identity.py +++ b/libcst/_parser/tests/test_node_identity.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index 9e01f1fe..331dd81c 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_version_compare.py b/libcst/_parser/tests/test_version_compare.py index 5e3dacbf..102657d6 100644 --- a/libcst/_parser/tests/test_version_compare.py +++ b/libcst/_parser/tests/test_version_compare.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_whitespace_parser.py b/libcst/_parser/tests/test_whitespace_parser.py index 2ab0206d..bbe8886a 100644 --- a/libcst/_parser/tests/test_whitespace_parser.py +++ b/libcst/_parser/tests/test_whitespace_parser.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/tests/test_wrapped_tokenize.py b/libcst/_parser/tests/test_wrapped_tokenize.py index 75369914..dbaf3700 100644 --- a/libcst/_parser/tests/test_wrapped_tokenize.py +++ b/libcst/_parser/tests/test_wrapped_tokenize.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/_parser/types/__init__.py b/libcst/_parser/types/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/types/__init__.py +++ b/libcst/_parser/types/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index df133675..1233de0c 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/conversions.py b/libcst/_parser/types/conversions.py index 9951e6da..4c589c52 100644 --- a/libcst/_parser/types/conversions.py +++ b/libcst/_parser/types/conversions.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/partials.py b/libcst/_parser/types/partials.py index abd35071..4db89fab 100644 --- a/libcst/_parser/types/partials.py +++ b/libcst/_parser/types/partials.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/production.py b/libcst/_parser/types/production.py index bb60014a..dfeffe7b 100644 --- a/libcst/_parser/types/production.py +++ b/libcst/_parser/types/production.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_config.py b/libcst/_parser/types/py_config.py index 6722a9ea..cb91d558 100644 --- a/libcst/_parser/types/py_config.py +++ b/libcst/_parser/types/py_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_token.py b/libcst/_parser/types/py_token.py index 60ddb2a2..7fac5eb8 100644 --- a/libcst/_parser/types/py_token.py +++ b/libcst/_parser/types/py_token.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_whitespace_state.py b/libcst/_parser/types/py_whitespace_state.py index 41244b98..99047590 100644 --- a/libcst/_parser/types/py_whitespace_state.py +++ b/libcst/_parser/types/py_whitespace_state.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/tests/__init__.py b/libcst/_parser/types/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/_parser/types/tests/__init__.py +++ b/libcst/_parser/types/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/tests/test_config.py b/libcst/_parser/types/tests/test_config.py index 98b81b9b..12a6ad27 100644 --- a/libcst/_parser/types/tests/test_config.py +++ b/libcst/_parser/types/tests/test_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/token.py b/libcst/_parser/types/token.py index 88d50b25..32c85ccf 100644 --- a/libcst/_parser/types/token.py +++ b/libcst/_parser/types/token.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/whitespace_state.py b/libcst/_parser/types/whitespace_state.py index a9798054..7eaeab32 100644 --- a/libcst/_parser/types/whitespace_state.py +++ b/libcst/_parser/types/whitespace_state.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/whitespace_parser.py b/libcst/_parser/whitespace_parser.py index 9ffb6a7d..1fe5a051 100644 --- a/libcst/_parser/whitespace_parser.py +++ b/libcst/_parser/whitespace_parser.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/wrapped_tokenize.py b/libcst/_parser/wrapped_tokenize.py index ae86c910..8d601052 100644 --- a/libcst/_parser/wrapped_tokenize.py +++ b/libcst/_parser/wrapped_tokenize.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_position.py b/libcst/_position.py index 6eb2d68d..ebf3801e 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_removal_sentinel.py b/libcst/_removal_sentinel.py index 581fc6a0..f88f4126 100644 --- a/libcst/_removal_sentinel.py +++ b/libcst/_removal_sentinel.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_tabs.py b/libcst/_tabs.py index 44e245ba..0a98bc55 100644 --- a/libcst/_tabs.py +++ b/libcst/_tabs.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_type_enforce.py b/libcst/_type_enforce.py index 92f158d3..6629eb87 100644 --- a/libcst/_type_enforce.py +++ b/libcst/_type_enforce.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index e9275bac..82ca072d 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index 25c3ee15..61b3c2d5 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_types.py b/libcst/_types.py index 8c5c380f..8df90ee5 100644 --- a/libcst/_types.py +++ b/libcst/_types.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_visitors.py b/libcst/_visitors.py index 6bb8557b..79ea6f40 100644 --- a/libcst/_visitors.py +++ b/libcst/_visitors.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codegen/__init__.py b/libcst/codegen/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/codegen/__init__.py +++ b/libcst/codegen/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codegen/gather.py b/libcst/codegen/gather.py index fe309b74..7d7fa8d6 100644 --- a/libcst/codegen/gather.py +++ b/libcst/codegen/gather.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 59fb251e..c8453a5d 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -431,7 +431,7 @@ def _get_fields(node: Type[cst.CSTNode]) -> Generator[Field, None, None]: all_exports: Set[str] = set() generated_code: List[str] = [] -generated_code.append("# Copyright (c) Facebook, Inc. and its affiliates.") +generated_code.append("# Copyright (c) Meta Platforms, Inc. and affiliates.") generated_code.append("#") generated_code.append( "# This source code is licensed under the MIT license found in the" diff --git a/libcst/codegen/gen_type_mapping.py b/libcst/codegen/gen_type_mapping.py index 0f229e06..a5af2bd9 100644 --- a/libcst/codegen/gen_type_mapping.py +++ b/libcst/codegen/gen_type_mapping.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -8,7 +8,7 @@ from typing import List from libcst.codegen.gather import imports, nodebases, nodeuses generated_code: List[str] = [] -generated_code.append("# Copyright (c) Facebook, Inc. and its affiliates.") +generated_code.append("# Copyright (c) Meta Platforms, Inc. and affiliates.") generated_code.append("#") generated_code.append( "# This source code is licensed under the MIT license found in the" diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index 07bcf95a..ef369cfa 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -9,7 +9,7 @@ from typing import List from libcst.codegen.gather import imports, nodebases, nodeuses generated_code: List[str] = [] -generated_code.append("# Copyright (c) Facebook, Inc. and its affiliates.") +generated_code.append("# Copyright (c) Meta Platforms, Inc. and affiliates.") generated_code.append("#") generated_code.append( "# This source code is licensed under the MIT license found in the" diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index 6779c17b..82e08c43 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codegen/tests/__init__.py b/libcst/codegen/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/codegen/tests/__init__.py +++ b/libcst/codegen/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index c2f18df2..b346570d 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codegen/transforms.py b/libcst/codegen/transforms.py index 61bbddf5..22f9058e 100644 --- a/libcst/codegen/transforms.py +++ b/libcst/codegen/transforms.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/__init__.py b/libcst/codemod/__init__.py index c072d69a..140b8ef8 100644 --- a/libcst/codemod/__init__.py +++ b/libcst/codemod/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index f944de51..5a25a794 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_codemod.py b/libcst/codemod/_codemod.py index 0bb4e7db..c0c3b2c7 100644 --- a/libcst/codemod/_codemod.py +++ b/libcst/codemod/_codemod.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index 0196eb27..2355d492 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_context.py b/libcst/codemod/_context.py index 98e57adf..04154859 100644 --- a/libcst/codemod/_context.py +++ b/libcst/codemod/_context.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py index f80e06de..c4a24932 100644 --- a/libcst/codemod/_dummy_pool.py +++ b/libcst/codemod/_dummy_pool.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codemod/_runner.py b/libcst/codemod/_runner.py index fe3b8e91..4e76a935 100644 --- a/libcst/codemod/_runner.py +++ b/libcst/codemod/_runner.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_testing.py b/libcst/codemod/_testing.py index 1152ad3c..c727acbf 100644 --- a/libcst/codemod/_testing.py +++ b/libcst/codemod/_testing.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/_visitor.py b/libcst/codemod/_visitor.py index d368b854..ab915c49 100644 --- a/libcst/codemod/_visitor.py +++ b/libcst/codemod/_visitor.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/__init__.py b/libcst/codemod/commands/__init__.py index 602d2685..aac70d45 100644 --- a/libcst/codemod/commands/__init__.py +++ b/libcst/codemod/commands/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/add_pyre_directive.py b/libcst/codemod/commands/add_pyre_directive.py index 165ebb0b..bee95c61 100644 --- a/libcst/codemod/commands/add_pyre_directive.py +++ b/libcst/codemod/commands/add_pyre_directive.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index 29e81246..bebff329 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py index 09935b70..91e78048 100644 --- a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index 2190f808..4ba1e753 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/ensure_import_present.py b/libcst/codemod/commands/ensure_import_present.py index c2ec033f..eaf16123 100644 --- a/libcst/codemod/commands/ensure_import_present.py +++ b/libcst/codemod/commands/ensure_import_present.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/fix_pyre_directives.py b/libcst/codemod/commands/fix_pyre_directives.py index b5310d8b..c3ab41b7 100644 --- a/libcst/codemod/commands/fix_pyre_directives.py +++ b/libcst/codemod/commands/fix_pyre_directives.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/noop.py b/libcst/codemod/commands/noop.py index eef1d897..23ea0a17 100644 --- a/libcst/codemod/commands/noop.py +++ b/libcst/codemod/commands/noop.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/remove_pyre_directive.py b/libcst/codemod/commands/remove_pyre_directive.py index a9d38506..01bf89a6 100644 --- a/libcst/codemod/commands/remove_pyre_directive.py +++ b/libcst/codemod/commands/remove_pyre_directive.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index b11e44c0..e9938d8a 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 03d5ddef..cd0a417f 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codemod/commands/strip_strings_from_types.py b/libcst/codemod/commands/strip_strings_from_types.py index b219a7db..6564d041 100644 --- a/libcst/codemod/commands/strip_strings_from_types.py +++ b/libcst/codemod/commands/strip_strings_from_types.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/__init__.py b/libcst/codemod/commands/tests/__init__.py index 602d2685..aac70d45 100644 --- a/libcst/codemod/commands/tests/__init__.py +++ b/libcst/codemod/commands/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_add_pyre_directive.py b/libcst/codemod/commands/tests/test_add_pyre_directive.py index b7fad93d..a8e7e45a 100644 --- a/libcst/codemod/commands/tests/test_add_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_add_pyre_directive.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py index 35a7aece..b2209cdc 100644 --- a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py index 675bf58a..866f03c4 100644 --- a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py index 2e65eac2..af34d3e1 100644 --- a/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_percent_format_to_fstring.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codemod/commands/tests/test_ensure_import_present.py b/libcst/codemod/commands/tests/test_ensure_import_present.py index b389c3bf..f9329957 100644 --- a/libcst/codemod/commands/tests/test_ensure_import_present.py +++ b/libcst/codemod/commands/tests/test_ensure_import_present.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_fix_pyre_directives.py b/libcst/codemod/commands/tests/test_fix_pyre_directives.py index dea57825..8871397a 100644 --- a/libcst/codemod/commands/tests/test_fix_pyre_directives.py +++ b/libcst/codemod/commands/tests/test_fix_pyre_directives.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_noop.py b/libcst/codemod/commands/tests/test_noop.py index 827f3a3c..12995f5c 100644 --- a/libcst/codemod/commands/tests/test_noop.py +++ b/libcst/codemod/commands/tests/test_noop.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_remove_pyre_directive.py b/libcst/codemod/commands/tests/test_remove_pyre_directive.py index 06ccb8f8..74c1847c 100644 --- a/libcst/codemod/commands/tests/test_remove_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_remove_pyre_directive.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_remove_unused_imports.py b/libcst/codemod/commands/tests/test_remove_unused_imports.py index 23b1c727..dc2b3366 100644 --- a/libcst/codemod/commands/tests/test_remove_unused_imports.py +++ b/libcst/codemod/commands/tests/test_remove_unused_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index aa95801e..e0668bfb 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_strip_strings_from_types.py b/libcst/codemod/commands/tests/test_strip_strings_from_types.py index 3ec292b9..64a2fd10 100644 --- a/libcst/codemod/commands/tests/test_strip_strings_from_types.py +++ b/libcst/codemod/commands/tests/test_strip_strings_from_types.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. 
+# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/tests/test_unnecessary_format_string.py b/libcst/codemod/commands/tests/test_unnecessary_format_string.py index ebf1977a..e980bd38 100644 --- a/libcst/codemod/commands/tests/test_unnecessary_format_string.py +++ b/libcst/codemod/commands/tests/test_unnecessary_format_string.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/commands/unnecessary_format_string.py b/libcst/codemod/commands/unnecessary_format_string.py index a539d213..d3bcd1c9 100644 --- a/libcst/codemod/commands/unnecessary_format_string.py +++ b/libcst/codemod/commands/unnecessary_format_string.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/__init__.py b/libcst/codemod/tests/__init__.py index 602d2685..aac70d45 100644 --- a/libcst/codemod/tests/__init__.py +++ b/libcst/codemod/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/codemod_formatter_error_input.py.txt b/libcst/codemod/tests/codemod_formatter_error_input.py.txt index c83c175a..02cef44e 100644 --- a/libcst/codemod/tests/codemod_formatter_error_input.py.txt +++ b/libcst/codemod/tests/codemod_formatter_error_input.py.txt @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/test_cli.py b/libcst/codemod/tests/test_cli.py index 45a6924c..9c1834e5 100644 --- a/libcst/codemod/tests/test_cli.py +++ b/libcst/codemod/tests/test_cli.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/test_codemod.py b/libcst/codemod/tests/test_codemod.py index 4fb5dbaa..754f35fe 100644 --- a/libcst/codemod/tests/test_codemod.py +++ b/libcst/codemod/tests/test_codemod.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index f586ea19..934ae667 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/test_metadata.py b/libcst/codemod/tests/test_metadata.py index acd25bf4..1a99b3dd 100644 --- a/libcst/codemod/tests/test_metadata.py +++ b/libcst/codemod/tests/test_metadata.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/tests/test_runner.py b/libcst/codemod/tests/test_runner.py index 9eda2ef9..2a714e07 100644 --- a/libcst/codemod/tests/test_runner.py +++ b/libcst/codemod/tests/test_runner.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index 12f36f65..a1416505 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index ccafc9c2..89897fb5 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 8d6181ac..d29b6c9f 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -1,4 +1,4 @@ -# Copyright (c) 2016-present, Facebook, Inc. +# Copyright (c) 2016-present, Meta Platforms, Inc. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree diff --git a/libcst/codemod/visitors/_gather_comments.py b/libcst/codemod/visitors/_gather_comments.py index 5adcecf0..e499382b 100644 --- a/libcst/codemod/visitors/_gather_comments.py +++ b/libcst/codemod/visitors/_gather_comments.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_gather_exports.py b/libcst/codemod/visitors/_gather_exports.py index 5186415c..a35b389a 100644 --- a/libcst/codemod/visitors/_gather_exports.py +++ b/libcst/codemod/visitors/_gather_exports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index 5920890f..5d5a50f9 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py index 81e8488f..0f1b926b 100644 --- a/libcst/codemod/visitors/_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index 2dc439ac..c2101733 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 9d3b6902..4c53ef19 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/__init__.py b/libcst/codemod/visitors/tests/__init__.py index 602d2685..aac70d45 100644 --- a/libcst/codemod/visitors/tests/__init__.py +++ b/libcst/codemod/visitors/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 57060269..4e410a14 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 7514d073..80ed335d 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1,4 +1,4 @@ -# Copyright (c) 2016-present, Facebook, Inc. +# Copyright (c) 2016-present, Meta Platforms, Inc. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/codemod/visitors/tests/test_gather_comments.py b/libcst/codemod/visitors/tests/test_gather_comments.py index da93823e..72511842 100644 --- a/libcst/codemod/visitors/tests/test_gather_comments.py +++ b/libcst/codemod/visitors/tests/test_gather_comments.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_gather_exports.py b/libcst/codemod/visitors/tests/test_gather_exports.py index 56d885e7..3ac9e9ce 100644 --- a/libcst/codemod/visitors/tests/test_gather_exports.py +++ b/libcst/codemod/visitors/tests/test_gather_exports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_gather_imports.py b/libcst/codemod/visitors/tests/test_gather_imports.py index b1e2c102..3e5d6b99 100644 --- a/libcst/codemod/visitors/tests/test_gather_imports.py +++ b/libcst/codemod/visitors/tests/test_gather_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py index 25f2d070..f8a11fcc 100644 --- a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_gather_unused_imports.py b/libcst/codemod/visitors/tests/test_gather_unused_imports.py index 5fb3cba2..e6e0d9bb 100644 --- a/libcst/codemod/visitors/tests/test_gather_unused_imports.py +++ b/libcst/codemod/visitors/tests/test_gather_unused_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index 564cf21e..93e5d7a5 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index 4621c58f..3e23a6d9 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/_statement.py b/libcst/helpers/_statement.py index a9431b44..0d21e225 100644 --- a/libcst/helpers/_statement.py +++ b/libcst/helpers/_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/_template.py b/libcst/helpers/_template.py index d1f02252..e3f915a5 100644 --- a/libcst/helpers/_template.py +++ b/libcst/helpers/_template.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/common.py b/libcst/helpers/common.py index 5bf7e460..0965abeb 100644 --- a/libcst/helpers/common.py +++ b/libcst/helpers/common.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/expression.py b/libcst/helpers/expression.py index 2a93c509..beb5f324 100644 --- a/libcst/helpers/expression.py +++ b/libcst/helpers/expression.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 5f2bddbe..7ae9f7d7 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/tests/__init__.py b/libcst/helpers/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/helpers/tests/__init__.py +++ b/libcst/helpers/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/tests/test_expression.py b/libcst/helpers/tests/test_expression.py index af9f820c..2b44e14b 100644 --- a/libcst/helpers/tests/test_expression.py +++ b/libcst/helpers/tests/test_expression.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index c5be94d5..687e0260 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/tests/test_statement.py b/libcst/helpers/tests/test_statement.py index 9db114c0..f26900bd 100644 --- a/libcst/helpers/tests/test_statement.py +++ b/libcst/helpers/tests/test_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/helpers/tests/test_template.py b/libcst/helpers/tests/test_template.py index 1805fb51..cef82dde 100644 --- a/libcst/helpers/tests/test_template.py +++ b/libcst/helpers/tests/test_template.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 27eab2ab..2d0121a0 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/_decorators.py b/libcst/matchers/_decorators.py index b6b236cc..68d1eb4e 100644 --- a/libcst/matchers/_decorators.py +++ b/libcst/matchers/_decorators.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 58b352be..e76877de 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 5aae0292..af0017f9 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 5e53d379..fe95b385 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/__init__.py b/libcst/matchers/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/matchers/tests/__init__.py +++ b/libcst/matchers/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_decorators.py b/libcst/matchers/tests/test_decorators.py index 196e109a..7486cee8 100644 --- a/libcst/matchers/tests/test_decorators.py +++ b/libcst/matchers/tests/test_decorators.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_extract.py b/libcst/matchers/tests/test_extract.py index 298f3fec..50a24c27 100644 --- a/libcst/matchers/tests/test_extract.py +++ b/libcst/matchers/tests/test_extract.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_findall.py b/libcst/matchers/tests/test_findall.py index 95233f53..77316a1a 100644 --- a/libcst/matchers/tests/test_findall.py +++ b/libcst/matchers/tests/test_findall.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_matchers.py b/libcst/matchers/tests/test_matchers.py index 11d6b5f5..9d79f640 100644 --- a/libcst/matchers/tests/test_matchers.py +++ b/libcst/matchers/tests/test_matchers.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_matchers_with_metadata.py b/libcst/matchers/tests/test_matchers_with_metadata.py index a41913d9..e4bdf07e 100644 --- a/libcst/matchers/tests/test_matchers_with_metadata.py +++ b/libcst/matchers/tests/test_matchers_with_metadata.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_replace.py b/libcst/matchers/tests/test_replace.py index cc922446..db9674f5 100644 --- a/libcst/matchers/tests/test_replace.py +++ b/libcst/matchers/tests/test_replace.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/matchers/tests/test_visitors.py b/libcst/matchers/tests/test_visitors.py index 8a9afd69..2c059921 100644 --- a/libcst/matchers/tests/test_visitors.py +++ b/libcst/matchers/tests/test_visitors.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index 0603f098..75e38229 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index f2c99495..69af2dce 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index 8eda53c8..beec959d 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index f9ba1406..2a05475e 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index e26a3eb5..174aff73 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/parent_node_provider.py b/libcst/metadata/parent_node_provider.py index 1e569d51..901891b9 100644 --- a/libcst/metadata/parent_node_provider.py +++ b/libcst/metadata/parent_node_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/position_provider.py b/libcst/metadata/position_provider.py index 004487b3..bbc9bb72 100644 --- a/libcst/metadata/position_provider.py +++ b/libcst/metadata/position_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/reentrant_codegen.py b/libcst/metadata/reentrant_codegen.py index 89e11c93..899d2f1f 100644 --- a/libcst/metadata/reentrant_codegen.py +++ b/libcst/metadata/reentrant_codegen.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index a8989992..0e87063c 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/span_provider.py b/libcst/metadata/span_provider.py index 400ec284..39e7b86c 100644 --- a/libcst/metadata/span_provider.py +++ b/libcst/metadata/span_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/__init__.py b/libcst/metadata/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/metadata/tests/__init__.py +++ b/libcst/metadata/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_base_provider.py b/libcst/metadata/tests/test_base_provider.py index e19a4571..0bf4ca51 100644 --- a/libcst/metadata/tests/test_base_provider.py +++ b/libcst/metadata/tests/test_base_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_expression_context_provider.py b/libcst/metadata/tests/test_expression_context_provider.py index a0292872..88ed0a88 100644 --- a/libcst/metadata/tests/test_expression_context_provider.py +++ b/libcst/metadata/tests/test_expression_context_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_full_repo_manager.py b/libcst/metadata/tests/test_full_repo_manager.py index c7a458d1..27066f5a 100644 --- a/libcst/metadata/tests/test_full_repo_manager.py +++ b/libcst/metadata/tests/test_full_repo_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_metadata_provider.py b/libcst/metadata/tests/test_metadata_provider.py index 4f7be175..7de94851 100644 --- a/libcst/metadata/tests/test_metadata_provider.py +++ b/libcst/metadata/tests/test_metadata_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/metadata/tests/test_metadata_wrapper.py b/libcst/metadata/tests/test_metadata_wrapper.py index 36fe7d5b..72b09b04 100644 --- a/libcst/metadata/tests/test_metadata_wrapper.py +++ b/libcst/metadata/tests/test_metadata_wrapper.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index 94b40c21..072a7405 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_parent_node_provider.py b/libcst/metadata/tests/test_parent_node_provider.py index 00b54cdc..f04c1051 100644 --- a/libcst/metadata/tests/test_parent_node_provider.py +++ b/libcst/metadata/tests/test_parent_node_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_position_provider.py b/libcst/metadata/tests/test_position_provider.py index 03132428..c479837e 100644 --- a/libcst/metadata/tests/test_position_provider.py +++ b/libcst/metadata/tests/test_position_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_reentrant_codegen.py b/libcst/metadata/tests/test_reentrant_codegen.py index fbb686e7..c76b2828 100644 --- a/libcst/metadata/tests/test_reentrant_codegen.py +++ b/libcst/metadata/tests/test_reentrant_codegen.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index e61f43b8..1f007200 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/tests/test_span_provider.py b/libcst/metadata/tests/test_span_provider.py index 9626894a..01aaef37 100644 --- a/libcst/metadata/tests/test_span_provider.py +++ b/libcst/metadata/tests/test_span_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index 383fa3d8..ad1f4471 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 3ce4d02c..da32e92c 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index 828dcfc1..d8e9f0fb 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/testing/__init__.py b/libcst/testing/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/testing/__init__.py +++ b/libcst/testing/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/testing/utils.py b/libcst/testing/utils.py index 5a1e2023..07ef26e9 100644 --- a/libcst/testing/utils.py +++ b/libcst/testing/utils.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/__init__.py b/libcst/tests/__init__.py index 62642369..7bec24cb 100644 --- a/libcst/tests/__init__.py +++ b/libcst/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/pyre/simple_class.py b/libcst/tests/pyre/simple_class.py index 2ee2d32f..d9d6784c 100644 --- a/libcst/tests/pyre/simple_class.py +++ b/libcst/tests/pyre/simple_class.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_batched_visitor.py b/libcst/tests/test_batched_visitor.py index ee3351f4..9bcc562f 100644 --- a/libcst/tests/test_batched_visitor.py +++ b/libcst/tests/test_batched_visitor.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/tests/test_deep_clone.py b/libcst/tests/test_deep_clone.py index eaf56adf..b6cf2be5 100644 --- a/libcst/tests/test_deep_clone.py +++ b/libcst/tests/test_deep_clone.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_deep_replace.py b/libcst/tests/test_deep_replace.py index 77c29f09..d8e5b475 100644 --- a/libcst/tests/test_deep_replace.py +++ b/libcst/tests/test_deep_replace.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_exceptions.py b/libcst/tests/test_exceptions.py index 4ca88e5a..f54f1da6 100644 --- a/libcst/tests/test_exceptions.py +++ b/libcst/tests/test_exceptions.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 9d4b3f20..266a3933 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_tabs.py b/libcst/tests/test_tabs.py index f21811b2..23ff9047 100644 --- a/libcst/tests/test_tabs.py +++ b/libcst/tests/test_tabs.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_tool.py b/libcst/tests/test_tool.py index b5b4aeeb..929d7225 100644 --- a/libcst/tests/test_tool.py +++ b/libcst/tests/test_tool.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index 16955241..7c01b82b 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/tests/test_visitor.py b/libcst/tests/test_visitor.py index 5bc0510e..bf540553 100644 --- a/libcst/tests/test_visitor.py +++ b/libcst/tests/test_visitor.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/libcst/tool.py b/libcst/tool.py index cb907764..bac61186 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index f009e918..04b8013c 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index 92f51883..7a12ebb3 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use std::{ path::{Component, PathBuf}, time::Duration, diff --git a/native/libcst/src/bin.rs b/native/libcst/src/bin.rs index 234b9017..1b42859a 100644 --- a/native/libcst/src/bin.rs +++ b/native/libcst/src/bin.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use libcst_native::*; use std::{ env, diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index 6c809768..e4355997 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/codegen.rs b/native/libcst/src/nodes/codegen.rs index 3b4f3e7a..99cc377a 100644 --- a/native/libcst/src/nodes/codegen.rs +++ b/native/libcst/src/nodes/codegen.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index cb70d572..c55e327d 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/inflate_helpers.rs b/native/libcst/src/nodes/inflate_helpers.rs index 902ad032..262de5ac 100644 --- a/native/libcst/src/nodes/inflate_helpers.rs +++ b/native/libcst/src/nodes/inflate_helpers.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use crate::{ nodes::traits::Result, tokenizer::{ diff --git a/native/libcst/src/nodes/macros.rs b/native/libcst/src/nodes/macros.rs index 1c47e3fa..76498b8e 100644 --- a/native/libcst/src/nodes/macros.rs +++ b/native/libcst/src/nodes/macros.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index 43981e87..f39cf607 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + mod whitespace; pub use whitespace::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index 8040be64..03c6afb5 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index ef09e0a0..fa031d4c 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs index 3b85eae2..acf140ee 100644 --- a/native/libcst/src/nodes/parser_config.rs +++ b/native/libcst/src/nodes/parser_config.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/py_cached.rs b/native/libcst/src/nodes/py_cached.rs index e8a4dfd4..0a7fe691 100644 --- a/native/libcst/src/nodes/py_cached.rs +++ b/native/libcst/src/nodes/py_cached.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 63c8ab8a..2247045b 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
diff --git a/native/libcst/src/nodes/test_utils.rs b/native/libcst/src/nodes/test_utils.rs index 6a462c8d..109d471d 100644 --- a/native/libcst/src/nodes/test_utils.rs +++ b/native/libcst/src/nodes/test_utils.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index 155ae17a..7e30b359 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use crate::{ tokenizer::whitespace_parser::{Config, WhitespaceError}, Codegen, CodegenState, Comma, EmptyLine, LeftParen, RightParen, diff --git a/native/libcst/src/nodes/whitespace.rs b/native/libcst/src/nodes/whitespace.rs index 89f2800e..2c99a048 100644 --- a/native/libcst/src/nodes/whitespace.rs +++ b/native/libcst/src/nodes/whitespace.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 641d9630..44573248 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use pyo3::types::{IntoPyDict, PyModule}; use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index f79594f2..a291e132 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/parser/mod.rs b/native/libcst/src/parser/mod.rs index 76094afb..8e6ec8e1 100644 --- a/native/libcst/src/parser/mod.rs +++ b/native/libcst/src/parser/mod.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + mod errors; mod grammar; mod numbers; diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs index abe23065..19d12214 100644 --- a/native/libcst/src/parser/numbers.rs +++ b/native/libcst/src/parser/numbers.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use once_cell::sync::Lazy; use regex::Regex; diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs index 827e6b2d..b938b691 100644 --- a/native/libcst/src/py.rs +++ b/native/libcst/src/py.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use pyo3::prelude::*; #[pymodule] diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 7c9f0684..5e8c29a1 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -1,4 +1,4 @@ -// This implementation is Copyright (c) Facebook, Inc. and its affiliates. +// This implementation is Copyright (c) Meta Platforms, Inc. and its affiliates. // // CPython 3.10.0a5 and the original C code this is based on is // Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs index 0d14d1e8..27f0be36 100644 --- a/native/libcst/src/tokenizer/core/string_types.rs +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -1,4 +1,4 @@ -// This implementation is Copyright (c) Facebook, Inc. and its affiliates. +// This implementation is Copyright (c) Meta Platforms, Inc. and its affiliates. // // CPython 3.10.0a5 and the original C code this is based on is // Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved diff --git a/native/libcst/src/tokenizer/debug_utils.rs b/native/libcst/src/tokenizer/debug_utils.rs index 8e646f8d..543258a1 100644 --- a/native/libcst/src/tokenizer/debug_utils.rs +++ b/native/libcst/src/tokenizer/debug_utils.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/mod.rs b/native/libcst/src/tokenizer/mod.rs index e900f511..60f1a20d 100644 --- a/native/libcst/src/tokenizer/mod.rs +++ b/native/libcst/src/tokenizer/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/operators.rs b/native/libcst/src/tokenizer/operators.rs index e3098cfa..94f70805 100644 --- a/native/libcst/src/tokenizer/operators.rs +++ b/native/libcst/src/tokenizer/operators.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index d839e815..6b4dfbb1 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/text_position/char_width.rs b/native/libcst/src/tokenizer/text_position/char_width.rs index 84f36405..879e1283 100644 --- a/native/libcst/src/tokenizer/text_position/char_width.rs +++ b/native/libcst/src/tokenizer/text_position/char_width.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. 
and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs index b6905484..f94d7311 100644 --- a/native/libcst/src/tokenizer/text_position/mod.rs +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Facebook, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs index e624bc3d..ec463f99 100644 --- a/native/libcst/src/tokenizer/whitespace_parser.rs +++ b/native/libcst/src/tokenizer/whitespace_parser.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use crate::nodes::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, diff --git a/native/libcst/tests/parser_roundtrip.rs b/native/libcst/tests/parser_roundtrip.rs index 2b8c79dd..b16a46f0 100644 --- a/native/libcst/tests/parser_roundtrip.rs +++ b/native/libcst/tests/parser_roundtrip.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use difference::assert_diff; use itertools::Itertools; use libcst_native::{parse_module, prettify_error, Codegen}; diff --git a/native/libcst_derive/src/codegen.rs b/native/libcst_derive/src/codegen.rs index 208d5104..4fce05df 100644 --- a/native/libcst_derive/src/codegen.rs +++ b/native/libcst_derive/src/codegen.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use proc_macro::TokenStream; use quote::{quote, quote_spanned}; use syn::{self, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; diff --git a/native/libcst_derive/src/inflate.rs b/native/libcst_derive/src/inflate.rs index 6223a256..323160c1 100644 --- a/native/libcst_derive/src/inflate.rs +++ b/native/libcst_derive/src/inflate.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use proc_macro::TokenStream; use quote::{quote, quote_spanned}; use syn::{self, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index 26da5fca..2ba4160c 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use proc_macro::TokenStream; use quote::{format_ident, quote, quote_spanned, ToTokens}; use syn::{ diff --git a/native/libcst_derive/src/lib.rs b/native/libcst_derive/src/lib.rs index 4a687bec..97d1e321 100644 --- a/native/libcst_derive/src/lib.rs +++ b/native/libcst_derive/src/lib.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + mod inflate; use inflate::impl_inflate; mod parenthesized_node; diff --git a/native/libcst_derive/src/parenthesized_node.rs b/native/libcst_derive/src/parenthesized_node.rs index 52d2aab4..fe716510 100644 --- a/native/libcst_derive/src/parenthesized_node.rs +++ b/native/libcst_derive/src/parenthesized_node.rs @@ -1,3 +1,8 @@ +// Copyright (c) Meta Platforms, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + use proc_macro::TokenStream; use quote::{quote, quote_spanned}; use syn::{spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; diff --git a/setup.py b/setup.py index 5558798f..0418cf26 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/parser_config.pyi b/stubs/libcst_native/parser_config.pyi index 0165df79..412da05e 100644 --- a/stubs/libcst_native/parser_config.pyi +++ b/stubs/libcst_native/parser_config.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -14,11 +14,9 @@ class BaseWhitespaceParserConfig: lines: Sequence[str], default_newline: str, ) -> BaseWhitespaceParserConfig: ... - lines: Sequence[str] default_newline: str - class ParserConfig(BaseWhitespaceParserConfig): def __new__( cls, @@ -31,7 +29,6 @@ class ParserConfig(BaseWhitespaceParserConfig): version: PythonVersionInfo, future_imports: FrozenSet[str], ) -> BaseWhitespaceParserConfig: ... - # lines is inherited encoding: str default_indent: str @@ -40,5 +37,4 @@ class ParserConfig(BaseWhitespaceParserConfig): version: PythonVersionInfo future_imports: FrozenSet[str] - def parser_config_asdict(config: ParserConfig) -> Mapping[str, Any]: ... diff --git a/stubs/libcst_native/token_type.pyi b/stubs/libcst_native/token_type.pyi index a0dd8179..4dacfad7 100644 --- a/stubs/libcst_native/token_type.pyi +++ b/stubs/libcst_native/token_type.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/tokenize.pyi b/stubs/libcst_native/tokenize.pyi index bbcbeab0..91c6d2e9 100644 --- a/stubs/libcst_native/tokenize.pyi +++ b/stubs/libcst_native/tokenize.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/whitespace_parser.pyi b/stubs/libcst_native/whitespace_parser.pyi index 291f6dc6..b2b51b54 100644 --- a/stubs/libcst_native/whitespace_parser.pyi +++ b/stubs/libcst_native/whitespace_parser.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/whitespace_state.pyi b/stubs/libcst_native/whitespace_state.pyi index da43bd54..82db9527 100644 --- a/stubs/libcst_native/whitespace_state.pyi +++ b/stubs/libcst_native/whitespace_state.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -7,7 +7,6 @@ class WhitespaceState: def __new__( cls, line: int, column: int, absolute_indent: str, is_parenthesized: bool ) -> WhitespaceState: ... - line: int # one-indexed (to match parso's behavior) column: int # zero-indexed (to match parso's behavior) # What to look for when executing `_parse_indent`. From 67db03915d34941a9f7c3d218eb065f2464d70bc Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 29 Dec 2021 21:23:46 +0000 Subject: [PATCH 167/632] implement PEP-654: except* (#571) --- libcst/__init__.py | 4 + libcst/_nodes/statement.py | 149 +++++++- libcst/_nodes/tests/test_try.py | 162 +++++++++ libcst/_typed_visitor.py | 148 ++++++++ libcst/matchers/__init__.py | 357 +++++++++++++++++++ libcst/matchers/_return_types.py | 4 + native/libcst/src/nodes/mod.rs | 8 +- native/libcst/src/nodes/statement.rs | 116 ++++++ native/libcst/src/parser/grammar.rs | 56 +++ native/libcst/tests/fixtures/starry_tries.py | 64 ++++ 10 files changed, 1063 insertions(+), 5 deletions(-) create mode 100644 native/libcst/tests/fixtures/starry_tries.py diff --git a/libcst/__init__.py b/libcst/__init__.py index c51dd9bc..945cb97a 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -153,6 +153,7 @@ from libcst._nodes.statement import ( Del, Else, ExceptHandler, + ExceptStarHandler, Expr, Finally, For, @@ -171,6 +172,7 @@ from libcst._nodes.statement import ( SimpleStatementLine, SimpleStatementSuite, Try, + TryStar, While, With, WithItem, @@ -367,6 +369,7 @@ __all__ = [ "Del", "Else", "ExceptHandler", + "ExceptStarHandler", "Expr", "Finally", "For", @@ -385,6 +388,7 @@ __all__ = [ "SimpleStatementLine", "SimpleStatementSuite", "Try", + "TryStar", "While", "With", "WithItem", diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index bf82d8a1..06f7af1a 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -829,6 +829,82 @@ class ExceptHandler(CSTNode): self.body._codegen(state) +@add_slots +@dataclass(frozen=True) +class ExceptStarHandler(CSTNode): + """ + An ``except*`` clause that appears after a :class:`TryStar` statement. + """ + + #: The body of the except. + body: BaseSuite + + #: The type of exception this catches. Can be a tuple in some cases. + type: BaseExpression + + #: The optional name that a caught exception is assigned to. + name: Optional[AsName] = None + + #: Sequence of empty lines appearing before this compound statement line. 
+ leading_lines: Sequence[EmptyLine] = () + + #: The whitespace between the ``except`` keyword and the star. + whitespace_after_except: SimpleWhitespace = SimpleWhitespace.field("") + + #: The whitespace between the star and the type. + whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field(" ") + + #: The whitespace after any type or name node (whichever comes last) and + #: the colon. + whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + + def _validate(self) -> None: + name = self.name + if name is not None and not isinstance(name.name, Name): + raise CSTValidationError( + "Must use a Name node for AsName name inside ExceptHandler." + ) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ExceptStarHandler": + return ExceptStarHandler( + leading_lines=visit_sequence( + self, "leading_lines", self.leading_lines, visitor + ), + whitespace_after_except=visit_required( + self, "whitespace_after_except", self.whitespace_after_except, visitor + ), + whitespace_after_star=visit_required( + self, "whitespace_after_star", self.whitespace_after_star, visitor + ), + type=visit_required(self, "type", self.type, visitor), + name=visit_optional(self, "name", self.name, visitor), + whitespace_before_colon=visit_required( + self, "whitespace_before_colon", self.whitespace_before_colon, visitor + ), + body=visit_required(self, "body", self.body, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + for ll in self.leading_lines: + ll._codegen(state) + state.add_indent_tokens() + + with state.record_syntactic_position(self, end_node=self.body): + state.add_token("except") + self.whitespace_after_except._codegen(state) + state.add_token("*") + self.whitespace_after_star._codegen(state) + typenode = self.type + if typenode is not None: + typenode._codegen(state) + namenode = self.name + if namenode is not None: + namenode._codegen(state) + self.whitespace_before_colon._codegen(state) + state.add_token(":") + self.body._codegen(state) + + @add_slots @dataclass(frozen=True) class Finally(CSTNode): @@ -873,7 +949,9 @@ class Finally(CSTNode): @dataclass(frozen=True) class Try(BaseCompoundStatement): """ - A ``try`` statement. + A regular ``try`` statement that cannot contain :class:`ExceptStar` blocks. For + ``try`` statements that can contain :class:`ExceptStar` blocks, see + :class:`TryStar`. """ #: The suite that is wrapped with a try statement. @@ -948,6 +1026,75 @@ class Try(BaseCompoundStatement): finalbody._codegen(state) +@add_slots +@dataclass(frozen=True) +class TryStar(BaseCompoundStatement): + """ + A ``try`` statement with ``except*`` blocks. + """ + + #: The suite that is wrapped with a try statement. + body: BaseSuite + + #: A list of one or more exception handlers. + handlers: Sequence[ExceptStarHandler] + + #: An optional else case. + orelse: Optional[Else] = None + + #: An optional finally case. + finalbody: Optional[Finally] = None + + #: Sequence of empty lines appearing before this compound statement line. + leading_lines: Sequence[EmptyLine] = () + + #: The whitespace that appears after the ``try`` keyword but before + #: the colon. 
+ whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + + def _validate(self) -> None: + if len(self.handlers) == 0: + raise CSTValidationError( + "A TryStar statement must have at least one ExceptHandler" + ) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TryStar": + return TryStar( + leading_lines=visit_sequence( + self, "leading_lines", self.leading_lines, visitor + ), + whitespace_before_colon=visit_required( + self, "whitespace_before_colon", self.whitespace_before_colon, visitor + ), + body=visit_required(self, "body", self.body, visitor), + handlers=visit_sequence(self, "handlers", self.handlers, visitor), + orelse=visit_optional(self, "orelse", self.orelse, visitor), + finalbody=visit_optional(self, "finalbody", self.finalbody, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + for ll in self.leading_lines: + ll._codegen(state) + state.add_indent_tokens() + + end_node = self.handlers[-1] + orelse = self.orelse + end_node = end_node if orelse is None else orelse + finalbody = self.finalbody + end_node = end_node if finalbody is None else finalbody + with state.record_syntactic_position(self, end_node=end_node): + state.add_token("try") + self.whitespace_before_colon._codegen(state) + state.add_token(":") + self.body._codegen(state) + for handler in self.handlers: + handler._codegen(state) + if orelse is not None: + orelse._codegen(state) + if finalbody is not None: + finalbody._codegen(state) + + @add_slots @dataclass(frozen=True) class ImportAlias(CSTNode): diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index a0eceac9..f61cbde0 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -8,9 +8,12 @@ from typing import Any import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider +native_parse_statement = parse_statement if is_native() else None + class TryTest(CSTNodeTest): @data_provider( @@ -407,3 +410,162 @@ class TryTest(CSTNodeTest): ) def test_invalid(self, **kwargs: Any) -> None: self.assert_invalid(**kwargs) + + +class TryStarTest(CSTNodeTest): + @data_provider( + ( + # Try/except with a class + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("Exception"), + ), + ), + ), + "code": "try: pass\nexcept* Exception: pass\n", + "parser": native_parse_statement, + }, + # Try/except with a named class + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("Exception"), + name=cst.AsName(cst.Name("exc")), + ), + ), + ), + "code": "try: pass\nexcept* Exception as exc: pass\n", + "parser": native_parse_statement, + "expected_position": CodeRange((1, 0), (2, 30)), + }, + # Try/except with multiple clauses + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("TypeError"), + name=cst.AsName(cst.Name("e")), + ), + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("KeyError"), + name=cst.AsName(cst.Name("e")), + ), + ), + ), + "code": "try: pass\n" + + "except* TypeError as e: pass\n" + + "except* 
KeyError as e: pass\n", + "parser": native_parse_statement, + "expected_position": CodeRange((1, 0), (3, 27)), + }, + # Simple try/except/finally block + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("KeyError"), + whitespace_after_except=cst.SimpleWhitespace(""), + ), + ), + finalbody=cst.Finally(cst.SimpleStatementSuite((cst.Pass(),))), + ), + "code": "try: pass\nexcept* KeyError: pass\nfinally: pass\n", + "parser": native_parse_statement, + "expected_position": CodeRange((1, 0), (3, 13)), + }, + # Simple try/except/else block + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("KeyError"), + whitespace_after_except=cst.SimpleWhitespace(""), + ), + ), + orelse=cst.Else(cst.SimpleStatementSuite((cst.Pass(),))), + ), + "code": "try: pass\nexcept* KeyError: pass\nelse: pass\n", + "parser": native_parse_statement, + "expected_position": CodeRange((1, 0), (3, 10)), + }, + # Verify whitespace in various locations + { + "node": cst.TryStar( + leading_lines=(cst.EmptyLine(comment=cst.Comment("# 1")),), + body=cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + leading_lines=(cst.EmptyLine(comment=cst.Comment("# 2")),), + type=cst.Name("TypeError"), + name=cst.AsName( + cst.Name("e"), + whitespace_before_as=cst.SimpleWhitespace(" "), + whitespace_after_as=cst.SimpleWhitespace(" "), + ), + whitespace_after_except=cst.SimpleWhitespace(" "), + whitespace_after_star=cst.SimpleWhitespace(""), + whitespace_before_colon=cst.SimpleWhitespace(" "), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ), + orelse=cst.Else( + leading_lines=(cst.EmptyLine(comment=cst.Comment("# 3")),), + body=cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_before_colon=cst.SimpleWhitespace(" "), + ), + finalbody=cst.Finally( + leading_lines=(cst.EmptyLine(comment=cst.Comment("# 4")),), + body=cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_before_colon=cst.SimpleWhitespace(" "), + ), + whitespace_before_colon=cst.SimpleWhitespace(" "), + ), + "code": "# 1\ntry : pass\n# 2\nexcept *TypeError as e : pass\n# 3\nelse : pass\n# 4\nfinally : pass\n", + "parser": native_parse_statement, + "expected_position": CodeRange((2, 0), (8, 14)), + }, + # Now all together + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=( + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("TypeError"), + name=cst.AsName(cst.Name("e")), + ), + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Name("KeyError"), + name=cst.AsName(cst.Name("e")), + ), + ), + orelse=cst.Else(cst.SimpleStatementSuite((cst.Pass(),))), + finalbody=cst.Finally(cst.SimpleStatementSuite((cst.Pass(),))), + ), + "code": "try: pass\n" + + "except* TypeError as e: pass\n" + + "except* KeyError as e: pass\n" + + "else: pass\n" + + "finally: pass\n", + "parser": native_parse_statement, + "expected_position": CodeRange((1, 0), (5, 13)), + }, + ) + ) + def test_valid(self, **kwargs: Any) -> None: + self.validate_node(**kwargs) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 82ca072d..0cc05ff4 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -148,6 +148,7 @@ if TYPE_CHECKING: Del, Else, ExceptHandler, + ExceptStarHandler, Expr, Finally, For, @@ -166,6 +167,7 @@ if TYPE_CHECKING: 
SimpleStatementLine, SimpleStatementSuite, Try, + TryStar, While, With, WithItem, @@ -1778,6 +1780,78 @@ class CSTTypedBaseFunctions: ) -> None: pass + @mark_no_op + def visit_ExceptStarHandler(self, node: "ExceptStarHandler") -> Optional[bool]: + pass + + @mark_no_op + def visit_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_after_except( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_after_except( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_after_star( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_after_star( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_before_colon( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_before_colon( + self, node: "ExceptStarHandler" + ) -> None: + pass + @mark_no_op def visit_Expr(self, node: "Expr") -> Optional[bool]: pass @@ -4454,6 +4528,58 @@ class CSTTypedBaseFunctions: def leave_Try_whitespace_before_colon(self, node: "Try") -> None: pass + @mark_no_op + def visit_TryStar(self, node: "TryStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_TryStar_body(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_body(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_handlers(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_handlers(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_orelse(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_orelse(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_finalbody(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_finalbody(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_leading_lines(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_leading_lines(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: + pass + @mark_no_op def visit_Tuple(self, node: "Tuple") -> Optional[bool]: pass @@ -4888,6 +5014,10 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_ExceptHandler(self, original_node: "ExceptHandler") -> None: pass + @mark_no_op + def leave_ExceptStarHandler(self, original_node: "ExceptStarHandler") -> None: + pass + @mark_no_op def leave_Expr(self, original_node: "Expr") -> None: pass @@ -5240,6 
+5370,10 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_Try(self, original_node: "Try") -> None: pass + @mark_no_op + def leave_TryStar(self, original_node: "TryStar") -> None: + pass + @mark_no_op def leave_Tuple(self, original_node: "Tuple") -> None: pass @@ -5574,6 +5708,14 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> Union["ExceptHandler", FlattenSentinel["ExceptHandler"], RemovalSentinel]: return updated_node + @mark_no_op + def leave_ExceptStarHandler( + self, original_node: "ExceptStarHandler", updated_node: "ExceptStarHandler" + ) -> Union[ + "ExceptStarHandler", FlattenSentinel["ExceptStarHandler"], RemovalSentinel + ]: + return updated_node + @mark_no_op def leave_Expr( self, original_node: "Expr", updated_node: "Expr" @@ -6114,6 +6256,12 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: return updated_node + @mark_no_op + def leave_TryStar( + self, original_node: "TryStar", updated_node: "TryStar" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + @mark_no_op def leave_Tuple( self, original_node: "Tuple", updated_node: "Tuple" diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 2d0121a0..cd239a92 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -4341,6 +4341,142 @@ class ExceptHandler(BaseMatcherNode): ] = DoNotCare() +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ExceptStarHandler(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + type: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + name: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_except: Union[ + 
SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + @dataclass(frozen=True, eq=False, unsafe_hash=False) class Expr(BaseSmallStatement, BaseMatcherNode): value: Union[ @@ -11938,6 +12074,225 @@ class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] = DoNotCare() +ExceptStarHandlerMatchType = Union[ + "ExceptStarHandler", MetadataMatchType, MatchIfTrue[cst.ExceptStarHandler] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TryStar(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + handlers: Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + OneOf[ + Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + ] + ], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + finalbody: Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + AllOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + 
AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + @dataclass(frozen=True, eq=False, unsafe_hash=False) class Tuple( BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode @@ -13023,6 +13378,7 @@ __all__ = [ "EmptyLine", "Equal", "ExceptHandler", + "ExceptStarHandler", "Expr", "Finally", "Float", @@ -13119,6 +13475,7 @@ __all__ = [ "SubtractAssign", "TrailingWhitespace", "Try", + "TryStar", "Tuple", "TypeOf", "UnaryOperation", diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index af0017f9..e10f47cf 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -144,6 +144,7 @@ from libcst._nodes.statement import ( Del, Else, ExceptHandler, + ExceptStarHandler, Expr, Finally, For, @@ -162,6 +163,7 @@ from libcst._nodes.statement import ( SimpleStatementLine, SimpleStatementSuite, Try, + TryStar, While, With, WithItem, @@ -229,6 +231,7 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { EmptyLine: Union[EmptyLine, RemovalSentinel], Equal: BaseCompOp, ExceptHandler: Union[ExceptHandler, RemovalSentinel], + ExceptStarHandler: Union[ExceptStarHandler, RemovalSentinel], Expr: Union[BaseSmallStatement, RemovalSentinel], Finally: Finally, Float: BaseExpression, @@ -316,6 +319,7 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { SubtractAssign: BaseAugOp, TrailingWhitespace: TrailingWhitespace, Try: Union[BaseStatement, RemovalSentinel], + TryStar: Union[BaseStatement, RemovalSentinel], Tuple: BaseExpression, UnaryOperation: BaseExpression, While: Union[BaseStatement, RemovalSentinel], diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index f39cf607..d30e5549 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -12,10 +12,10 @@ mod statement; pub use statement::{ AnnAssign, Annotation, AsName, Assert, Assign, AssignTarget, AssignTargetExpression, AugAssign, Break, ClassDef, CompoundStatement, Continue, Decorator, Del, DelTargetExpression, Else, - ExceptHandler, Expr, Finally, For, FunctionDef, Global, If, Import, 
ImportAlias, ImportFrom, - ImportNames, IndentedBlock, NameItem, Nonlocal, OrElse, Pass, Raise, Return, - SimpleStatementLine, SimpleStatementSuite, SmallStatement, Statement, Suite, Try, While, With, - WithItem, + ExceptHandler, ExceptStarHandler, Expr, Finally, For, FunctionDef, Global, If, Import, + ImportAlias, ImportFrom, ImportNames, IndentedBlock, NameItem, Nonlocal, OrElse, Pass, Raise, + Return, SimpleStatementLine, SimpleStatementSuite, SmallStatement, Statement, Suite, Try, + TryStar, While, With, WithItem, }; mod expression; diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 2247045b..7f717353 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -53,6 +53,7 @@ pub enum CompoundStatement<'a> { While(While<'a>), ClassDef(ClassDef<'a>), Try(Try<'a>), + TryStar(TryStar<'a>), With(With<'a>), } @@ -65,6 +66,7 @@ impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { Self::While(f) => &mut f.leading_lines, Self::ClassDef(c) => &mut c.leading_lines, Self::Try(t) => &mut t.leading_lines, + Self::TryStar(t) => &mut t.leading_lines, Self::With(w) => &mut w.leading_lines, } } @@ -1713,6 +1715,68 @@ impl<'a> Inflate<'a> for ExceptHandler<'a> { } } +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct ExceptStarHandler<'a> { + pub body: Suite<'a>, + pub r#type: Expression<'a>, + pub name: Option<AsName<'a>>, + pub leading_lines: Vec<EmptyLine<'a>>, + pub whitespace_after_except: SimpleWhitespace<'a>, + pub whitespace_after_star: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) except_tok: TokenRef<'a>, + pub(crate) star_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for ExceptStarHandler<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + + state.add_token("except"); + self.whitespace_after_except.codegen(state); + state.add_token("*"); + self.whitespace_after_star.codegen(state); + self.r#type.codegen(state); + if let Some(n) = &self.name { + n.codegen(state); + } + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for ExceptStarHandler<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.leading_lines = parse_empty_lines( + config, + &mut self.except_tok.whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_after_except = + parse_simple_whitespace(config, &mut self.except_tok.whitespace_after.borrow_mut())?; + self.whitespace_after_star = + parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; + + self.r#type = self.r#type.inflate(config)?; + self.name = self.name.inflate(config)?; + if self.name.is_some() { + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut self.colon_tok.whitespace_before.borrow_mut(), + )?; + } + + self.body = self.body.inflate(config)?; + Ok(self) + } +} + #[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Try<'a> { pub body: Suite<'a>, @@ -1765,6 +1829,58 @@ impl<'a> Inflate<'a> for Try<'a> { } } +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct TryStar<'a> { + pub body: Suite<'a>, + pub handlers: Vec<ExceptStarHandler<'a>>, + pub orelse: Option<Else<'a>>, + pub finalbody: Option<Finally<'a>>, + pub leading_lines: Vec<EmptyLine<'a>>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) try_tok: TokenRef<'a>, + // colon_tok unnecessary +} + +impl<'a> Codegen<'a> for TryStar<'a> { + fn codegen(&self,
state: &mut CodegenState<'a>) { + for ll in &self.leading_lines { + ll.codegen(state); + } + state.add_indent(); + state.add_token("try"); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + for h in &self.handlers { + h.codegen(state); + } + if let Some(e) = &self.orelse { + e.codegen(state); + } + if let Some(f) = &self.finalbody { + f.codegen(state); + } + } +} + +impl<'a> Inflate<'a> for TryStar<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( + config, + &mut (*self.try_tok).whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_before_colon = + parse_simple_whitespace(config, &mut (*self.try_tok).whitespace_after.borrow_mut())?; + self.body = self.body.inflate(config)?; + self.handlers = self.handlers.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + self.finalbody = self.finalbody.inflate(config)?; + Ok(self) + } +} + #[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct AugAssign<'a> { pub target: AssignTargetExpression<'a>, diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index a291e132..1bf7ca36 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -148,6 +148,7 @@ parser! { / &(lit("with") / tok(Async, "ASYNC")) w:with_stmt() { CompoundStatement::With(w) } / &(lit("for") / tok(Async, "ASYNC")) f:for_stmt() { CompoundStatement::For(f) } / &lit("try") t:try_stmt() { CompoundStatement::Try(t) } + / &lit("try") t:try_star_stmt() { CompoundStatement::TryStar(t) } / &lit("while") w:while_stmt() { CompoundStatement::While(w) } // Simple statements @@ -498,6 +499,13 @@ parser! { make_try(kw, b, ex, el, f) } + // Note: this is separate because TryStar is a different type in LibCST + rule try_star_stmt() -> TryStar<'a> + = kw:lit("try") lit(":") b:block() ex:except_star_block()+ + el:else_block()? f:finally_block()? { + make_try_star(kw, b, ex, el, f) + } + // Except statement rule except_block() -> ExceptHandler<'a> @@ -509,6 +517,12 @@ parser! { make_except(kw, None, None, col, b) } + rule except_star_block() -> ExceptStarHandler<'a> + = kw:lit("except") star:lit("*") e:expression() + a:(k:lit("as") n:name() {(k, n)})? 
col:lit(":") b:block() { + make_except_star(kw, star, e, a, col, b) + } + rule finally_block() -> Finally<'a> = kw:lit("finally") col:lit(":") b:block() { make_finally(kw, col, b) @@ -2814,6 +2828,30 @@ fn make_except<'a>( } } +fn make_except_star<'a>( + except_tok: TokenRef<'a>, + star_tok: TokenRef<'a>, + exp: Expression<'a>, + as_: Option<(TokenRef<'a>, Name<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> ExceptStarHandler<'a> { + // TODO: AsName should come from outside + let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))); + ExceptStarHandler { + body, + r#type: exp, + name, + leading_lines: Default::default(), + whitespace_after_except: Default::default(), + whitespace_after_star: Default::default(), + whitespace_before_colon: Default::default(), + except_tok, + colon_tok, + star_tok, + } +} + fn make_try<'a>( try_tok: TokenRef<'a>, body: Suite<'a>, @@ -2832,6 +2870,24 @@ fn make_try<'a>( } } +fn make_try_star<'a>( + try_tok: TokenRef<'a>, + body: Suite<'a>, + handlers: Vec>, + orelse: Option>, + finalbody: Option>, +) -> TryStar<'a> { + TryStar { + body, + handlers, + orelse, + finalbody, + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), + try_tok, + } +} + fn make_aug_op(tok: TokenRef) -> Result { let whitespace_before = Default::default(); let whitespace_after = Default::default(); diff --git a/native/libcst/tests/fixtures/starry_tries.py b/native/libcst/tests/fixtures/starry_tries.py new file mode 100644 index 00000000..2749a031 --- /dev/null +++ b/native/libcst/tests/fixtures/starry_tries.py @@ -0,0 +1,64 @@ +#foo. + +try : + pass + + # foo + +except * lol as LOL : + + pass + +except * f: + + # foo + + pass + +else : + + pass + +finally : + + foo + +try: + pass +except*f: + pass +finally: + pass + + +try: + + # 1 + + try: + + # 2 + + pass + + # 3 + + # 4 + + finally: + + # 5 + + pass + + # 6 + + # 7 + +except *foo: + + #8 + + pass + + #9 From 9932a6d3394bc6757af8c8346416848c234e9e39 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 30 Dec 2021 10:00:51 +0000 Subject: [PATCH 168/632] Implement PEP-634 - Match statement (#568) * ParenthesizedNode implementation for Box * match statement rust CST and grammar * match statement python CST and docs * run rust unit tests in release mode for now --- .github/workflows/build.yml | 2 +- libcst/__init__.py | 34 + libcst/_nodes/statement.py | 825 +++++- libcst/_nodes/tests/test_match.py | 430 +++ libcst/_typed_visitor.py | 827 +++++- libcst/matchers/__init__.py | 2379 +++++++++++++++++ libcst/matchers/_return_types.py | 36 +- native/libcst/src/nodes/mod.rs | 11 +- native/libcst/src/nodes/op.rs | 30 + native/libcst/src/nodes/statement.rs | 798 +++++- native/libcst/src/nodes/traits.rs | 19 + native/libcst/src/parser/grammar.rs | 492 ++++ .../libcst/tests/fixtures/malicious_match.py | 39 + 13 files changed, 5912 insertions(+), 10 deletions(-) create mode 100644 libcst/_nodes/tests/test_match.py create mode 100644 native/libcst/tests/fixtures/malicious_match.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9ea35898..31914cc6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -260,7 +260,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: test - args: --manifest-path=native/Cargo.toml + args: --manifest-path=native/Cargo.toml --release - name: clippy uses: actions-rs/clippy-check@v1 with: diff --git a/libcst/__init__.py b/libcst/__init__.py index 945cb97a..ff63033d 100644 --- a/libcst/__init__.py +++ 
b/libcst/__init__.py @@ -164,6 +164,23 @@ from libcst._nodes.statement import ( ImportAlias, ImportFrom, IndentedBlock, + Match, + MatchAs, + MatchCase, + MatchClass, + MatchKeywordElement, + MatchList, + MatchMapping, + MatchMappingElement, + MatchOr, + MatchOrElement, + MatchPattern, + MatchSequence, + MatchSequenceElement, + MatchSingleton, + MatchStar, + MatchTuple, + MatchValue, NameItem, Nonlocal, Pass, @@ -380,6 +397,23 @@ __all__ = [ "ImportAlias", "ImportFrom", "IndentedBlock", + "Match", + "MatchCase", + "MatchAs", + "MatchClass", + "MatchKeywordElement", + "MatchList", + "MatchMapping", + "MatchMappingElement", + "MatchOr", + "MatchOrElement", + "MatchPattern", + "MatchSequence", + "MatchSequenceElement", + "MatchSingleton", + "MatchStar", + "MatchTuple", + "MatchValue", "NameItem", "Nonlocal", "Pass", diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 06f7af1a..7d9208df 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -6,13 +6,14 @@ import inspect import re from abc import ABC, abstractmethod -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Optional, Pattern, Sequence, Union from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.base import CSTNode, CSTValidationError from libcst._nodes.expression import ( + _BaseParenthesizedNode, Annotation, Arg, Asynchronous, @@ -24,11 +25,15 @@ from libcst._nodes.expression import ( ConcatenatedString, ExpressionPosition, From, + LeftCurlyBrace, LeftParen, + LeftSquareBracket, List, Name, Parameters, + RightCurlyBrace, RightParen, + RightSquareBracket, SimpleString, Tuple, ) @@ -40,7 +45,15 @@ from libcst._nodes.internal import ( visit_sentinel, visit_sequence, ) -from libcst._nodes.op import AssignEqual, BaseAugOp, Comma, Dot, ImportStar, Semicolon +from libcst._nodes.op import ( + AssignEqual, + BaseAugOp, + BitOr, + Comma, + Dot, + ImportStar, + Semicolon, +) from libcst._nodes.whitespace import ( BaseParenthesizableWhitespace, EmptyLine, @@ -2566,3 +2579,811 @@ class Nonlocal(BaseSmallStatement): state.add_token("; ") elif isinstance(semicolon, Semicolon): semicolon._codegen(state) + + +class MatchPattern(_BaseParenthesizedNode, ABC): + """ + A base class for anything that can appear as a pattern in a :class:`Match` + statement. + """ + + +@add_slots +@dataclass(frozen=True) +class Match(BaseCompoundStatement): + """ + A ``match`` statement. + """ + + #: The subject of the match. + subject: BaseExpression + + #: A non-empty list of match cases. + cases: Sequence["MatchCase"] + + #: Sequence of empty lines appearing before this compound statement line. + leading_lines: Sequence[EmptyLine] = () + + #: Whitespace between the ``match`` keyword and the subject. + whitespace_after_match: SimpleWhitespace = SimpleWhitespace.field(" ") + + #: Whitespace after the subject but before the colon. + whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + + #: Any optional trailing comment and the final ``NEWLINE`` at the end of the line. + whitespace_after_colon: TrailingWhitespace = TrailingWhitespace.field() + + #: A string represents a specific indentation. A ``None`` value uses the modules's + #: default indentation. This is included because indentation is allowed to be + #: inconsistent across a file, just not ambiguously. + indent: Optional[str] = None + + #: Any trailing comments or lines after the dedent that are owned by this match + #: block. 
Statements own preceeding and same-line trailing comments, but not + #: trailing lines, so it falls on :class:`Match` to own it. In the case + #: that a statement follows a :class:`Match` block, that statement will own the + #: comments and lines that are at the same indent as the statement, and this + #: :class:`Match` will own the comments and lines that are indented further. + footer: Sequence[EmptyLine] = () + + def _validate(self) -> None: + if len(self.cases) == 0: + raise CSTValidationError("A match statement must have at least one case.") + + if self.whitespace_after_match.empty: + raise CSTValidationError( + "Must have at least one space after a 'match' keyword" + ) + + indent = self.indent + if indent is not None: + if len(indent) == 0: + raise CSTValidationError( + "A match statement must have a non-zero width indent." + ) + if _INDENT_WHITESPACE_RE.fullmatch(indent) is None: + raise CSTValidationError( + "An indent must be composed of only whitespace characters." + ) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Match": + return Match( + leading_lines=visit_sequence( + self, "leading_lines", self.leading_lines, visitor + ), + whitespace_after_match=visit_required( + self, "whitespace_after_match", self.whitespace_after_match, visitor + ), + subject=visit_required(self, "subject", self.subject, visitor), + whitespace_before_colon=visit_required( + self, "whitespace_before_colon", self.whitespace_before_colon, visitor + ), + whitespace_after_colon=visit_required( + self, "whitespace_after_colon", self.whitespace_after_colon, visitor + ), + indent=self.indent, + cases=visit_sequence(self, "cases", self.cases, visitor), + footer=visit_sequence(self, "footer", self.footer, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + for ll in self.leading_lines: + ll._codegen(state) + state.add_indent_tokens() + + with state.record_syntactic_position(self, end_node=self.cases[-1]): + state.add_token("match") + self.whitespace_after_match._codegen(state) + self.subject._codegen(state) + self.whitespace_before_colon._codegen(state) + state.add_token(":") + self.whitespace_after_colon._codegen(state) + + indent = self.indent + state.increase_indent(state.default_indent if indent is None else indent) + for c in self.cases: + c._codegen(state) + + for f in self.footer: + f._codegen(state) + + state.decrease_indent() + + +@add_slots +@dataclass(frozen=True) +class MatchCase(CSTNode): + """ + A single ``case`` block of a :class:`Match` statement. + """ + + #: The pattern that ``subject`` will be matched against. + pattern: MatchPattern + + #: The body of this case block, to be evaluated if ``pattern`` matches ``subject`` + #: and ``guard`` evaluates to a truthy value. + body: BaseSuite + + #: Optional expression that will be evaluated if ``pattern`` matches ``subject``. + guard: Optional[BaseExpression] = None + + #: Sequence of empty lines appearing before this case block. + leading_lines: Sequence[EmptyLine] = () + + #: Whitespace directly after the ``case`` keyword. + whitespace_after_case: SimpleWhitespace = SimpleWhitespace.field(" ") + + #: Whitespace before the ``if`` keyword in case there's a guard expression. + whitespace_before_if: SimpleWhitespace = SimpleWhitespace.field("") + + #: Whitespace after the ``if`` keyword in case there's a guard expression. + whitespace_after_if: SimpleWhitespace = SimpleWhitespace.field("") + + #: Whitespace before the colon. 
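+    #: For example, this is the single space in ``case 1 :``; it is empty (the
+    #: default) in ``case 1:``.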
+ whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "CSTNode": + return MatchCase( + leading_lines=visit_sequence( + self, "leading_lines", self.leading_lines, visitor + ), + whitespace_after_case=visit_required( + self, "whitespace_after_case", self.whitespace_after_case, visitor + ), + pattern=visit_required(self, "pattern", self.pattern, visitor), + whitespace_before_if=visit_optional( + self, "whitespace_before_if", self.whitespace_before_if, visitor + ), + whitespace_after_if=visit_optional( + self, "whitespace_after_if", self.whitespace_after_if, visitor + ), + guard=visit_optional(self, "guard", self.guard, visitor), + body=visit_required(self, "body", self.body, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + for ll in self.leading_lines: + ll._codegen(state) + state.add_indent_tokens() + with state.record_syntactic_position(self, end_node=self.body): + state.add_token("case") + self.whitespace_after_case._codegen(state) + self.pattern._codegen(state) + + guard = self.guard + if guard is not None: + self.whitespace_before_if._codegen(state) + state.add_token("if") + self.whitespace_after_if._codegen(state) + guard._codegen(state) + + self.whitespace_before_colon._codegen(state) + state.add_token(":") + self.body._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchValue(MatchPattern): + """ + A match literal or value pattern that compares by equality. + """ + + #: an expression to compare to + value: BaseExpression + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "CSTNode": + return MatchValue(value=visit_required(self, "value", self.value, visitor)) + + def _codegen_impl(self, state: CodegenState) -> None: + with state.record_syntactic_position(self): + self.value._codegen(state) + + @property + def lpar(self) -> Sequence[LeftParen]: + return self.value.lpar + + @lpar.setter + def lpar(self, value: Sequence[LeftParen]) -> None: + self.value.lpar = value + + +@add_slots +@dataclass(frozen=True) +class MatchSingleton(MatchPattern): + """ + A match literal pattern that compares by identity. + """ + + #: a literal to compare to + value: Name + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "CSTNode": + return MatchSingleton(value=visit_required(self, "value", self.value, visitor)) + + def _validate(self) -> None: + if self.value.value not in {"True", "False", "None"}: + raise CSTValidationError( + "A match singleton can only be True, False, or None" + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with state.record_syntactic_position(self): + self.value._codegen(state) + + @property + def lpar(self) -> Sequence[LeftParen]: + return self.value.lpar + + @lpar.setter + def lpar(self, value: Sequence[LeftParen]) -> None: + self.value.lpar = value + + +@add_slots +@dataclass(frozen=True) +class MatchSequenceElement(CSTNode): + """ + An element in a sequence match pattern. + """ + + value: MatchPattern + + #: An optional trailing comma. 
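+    #: When left as :class:`MaybeSentinel`, a comma is emitted automatically
+    #: wherever the surrounding sequence pattern requires one (see
+    #: ``default_comma`` in ``_codegen_impl`` below).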
+ comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + + def _visit_and_replace_children( + self, visitor: CSTVisitorT + ) -> "MatchSequenceElement": + return MatchSequenceElement( + value=visit_required(self, "value", self.value, visitor), + comma=visit_sentinel(self, "comma", self.comma, visitor), + ) + + def _codegen_impl( + self, + state: CodegenState, + default_comma: bool = False, + default_comma_whitespace: bool = True, + ) -> None: + with state.record_syntactic_position(self): + self.value._codegen(state) + comma = self.comma + if comma is MaybeSentinel.DEFAULT and default_comma: + state.add_token(", " if default_comma_whitespace else ",") + elif isinstance(comma, Comma): + comma._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchStar(CSTNode): + """ + A starred element in a sequence match pattern. Matches the rest of the sequence. + """ + + #: The name of the pattern binding. A ``None`` value represents ``*_``. + name: Optional[Name] = None + + #: An optional trailing comma. + comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + + #: Optional whitespace between the star and the name. + whitespace_before_name: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchStar": + return MatchStar( + whitespace_before_name=visit_required( + self, "whitespace_before_name", self.whitespace_before_name, visitor + ), + name=visit_optional(self, "name", self.name, visitor), + comma=visit_sentinel(self, "comma", self.comma, visitor), + ) + + def _codegen_impl( + self, + state: CodegenState, + default_comma: bool = False, + default_comma_whitespace: bool = True, + ) -> None: + with state.record_syntactic_position(self): + state.add_token("*") + self.whitespace_before_name._codegen(state) + name = self.name + if name is None: + state.add_token("_") + else: + name._codegen(state) + comma = self.comma + if comma is MaybeSentinel.DEFAULT and default_comma: + state.add_token(", " if default_comma_whitespace else ",") + elif isinstance(comma, Comma): + comma._codegen(state) + + +class MatchSequence(MatchPattern, ABC): + """ + A match sequence pattern. It's either a :class:`MatchList` or a :class:`MatchTuple`. + Matches a variable length sequence if one of the patterns is a :class:`MatchStar`, + otherwise matches a fixed length sequence. + """ + + #: Patterns to be matched against the subject elements if it is a sequence. + patterns: Sequence[Union[MatchSequenceElement, MatchStar]] + + +@add_slots +@dataclass(frozen=True) +class MatchList(MatchSequence): + """ + A list match pattern. It's either an "open sequence pattern" (without brackets) or a + regular list literal (with brackets). + """ + + #: Patterns to be matched against the subject elements if it is a sequence. + patterns: Sequence[Union[MatchSequenceElement, MatchStar]] + + #: An optional left bracket. If missing, this is an open sequence pattern. + lbracket: Optional[LeftSquareBracket] = LeftSquareBracket.field() + + #: An optional left bracket. If missing, this is an open sequence pattern. + rbracket: Optional[RightSquareBracket] = RightSquareBracket.field() + + #: Parenthesis at the beginning of the node + lpar: Sequence[LeftParen] = () + #: Parentheses after the pattern, but before a comma (if there is one). 
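+    #: Both ``lpar`` and ``rpar`` default to empty, as in a plain bracketed
+    #: pattern such as ``case [1, 2]:``.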
+ rpar: Sequence[RightParen] = () + + def _validate(self) -> None: + if self.lbracket and not self.rbracket: + raise CSTValidationError("Cannot have left bracket without right bracket") + if self.rbracket and not self.lbracket: + raise CSTValidationError("Cannot have right bracket without left bracket") + + if not self.patterns and not self.lbracket: + raise CSTValidationError( + "Must have brackets if matching against empty list" + ) + + super(MatchList, self)._validate() + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchList": + return MatchList( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + lbracket=visit_optional(self, "lbracket", self.lbracket, visitor), + patterns=visit_sequence(self, "patterns", self.patterns, visitor), + rbracket=visit_optional(self, "rbracket", self.rbracket, visitor), + rpar=visit_sequence(self, "rpar", self.rpar, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + lbracket = self.lbracket + if lbracket is not None: + lbracket._codegen(state) + pats = self.patterns + for idx, pat in enumerate(pats): + pat._codegen(state, default_comma=(idx < len(pats) - 1)) + rbracket = self.rbracket + if rbracket is not None: + rbracket._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchTuple(MatchSequence): + """ + A tuple match pattern. + """ + + #: Patterns to be matched against the subject elements if it is a sequence. + patterns: Sequence[Union[MatchSequenceElement, MatchStar]] + + #: Parenthesis at the beginning of the node + lpar: Sequence[LeftParen] = field(default_factory=lambda: (LeftParen(),)) + #: Parentheses after the pattern, but before a comma (if there is one). + rpar: Sequence[RightParen] = field(default_factory=lambda: (RightParen(),)) + + def _validate(self) -> None: + if len(self.lpar) < 1: + raise CSTValidationError( + "Tuple patterns must have at least pair of parenthesis" + ) + + super(MatchTuple, self)._validate() + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchTuple": + return MatchTuple( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + patterns=visit_sequence(self, "patterns", self.patterns, visitor), + rpar=visit_sequence(self, "rpar", self.rpar, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + pats = self.patterns + patlen = len(pats) + for idx, pat in enumerate(pats): + pat._codegen( + state, + default_comma=patlen == 1 or (idx < patlen - 1), + default_comma_whitespace=patlen != 1, + ) + + +@add_slots +@dataclass(frozen=True) +class MatchMappingElement(CSTNode): + """ + A ``key: value`` pair in a match mapping pattern. + """ + + key: BaseExpression + + #: The pattern to be matched corresponding to ``key``. + pattern: MatchPattern + + #: An optional trailing comma. + comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + + #: Whitespace between ``key`` and the colon. + whitespace_before_colon: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + + #: Whitespace between the colon and ``pattern``. 
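+    #: Defaults to a single space, as in ``case {"key": pattern}:``.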
+ whitespace_after_colon: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") + + def _visit_and_replace_children( + self, visitor: CSTVisitorT + ) -> "MatchMappingElement": + return MatchMappingElement( + key=visit_required(self, "key", self.key, visitor), + whitespace_before_colon=visit_required( + self, "whitespace_before_colon", self.whitespace_before_colon, visitor + ), + whitespace_after_colon=visit_required( + self, "whitespace_after_colon", self.whitespace_after_colon, visitor + ), + pattern=visit_required(self, "pattern", self.pattern, visitor), + comma=visit_sentinel(self, "comma", self.comma, visitor), + ) + + def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: + with state.record_syntactic_position(self): + self.key._codegen(state) + self.whitespace_before_colon._codegen(state) + state.add_token(":") + self.whitespace_after_colon._codegen(state) + self.pattern._codegen(state) + comma = self.comma + if comma is MaybeSentinel.DEFAULT and default_comma: + state.add_token(", ") + elif isinstance(comma, Comma): + comma._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchMapping(MatchPattern): + """ + A match mapping pattern. + """ + + #: A sequence of mapping elements. + elements: Sequence[MatchMappingElement] = () + + #: Left curly brace at the beginning of the pattern. + lbrace: LeftCurlyBrace = LeftCurlyBrace.field() + + #: Right curly brace at the end of the pattern. + rbrace: RightCurlyBrace = RightCurlyBrace.field() + + #: An optional name to capture the remaining elements of the mapping. + rest: Optional[Name] = None + + #: Optional whitespace between stars and ``rest``. + whitespace_before_rest: SimpleWhitespace = SimpleWhitespace.field("") + + #: An optional trailing comma attached to ``rest``. + trailing_comma: Optional[Comma] = None + + #: Parenthesis at the beginning of the node + lpar: Sequence[LeftParen] = () + #: Parentheses after the pattern + rpar: Sequence[RightParen] = () + + def _validate(self) -> None: + if isinstance(self.trailing_comma, Comma) and self.rest is not None: + raise CSTValidationError("Cannot have a trailing comma without **rest") + super(MatchMapping, self)._validate() + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchMapping": + return MatchMapping( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + lbrace=visit_required(self, "lbrace", self.lbrace, visitor), + elements=visit_sequence(self, "elements", self.elements, visitor), + whitespace_before_rest=visit_required( + self, "whitespace_before_rest", self.whitespace_before_rest, visitor + ), + rest=visit_optional(self, "rest", self.rest, visitor), + trailing_comma=visit_optional( + self, "trailing_comma", self.trailing_comma, visitor + ), + rbrace=visit_required(self, "rbrace", self.rbrace, visitor), + rpar=visit_sequence(self, "rpar", self.rpar, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + self.lbrace._codegen(state) + elems = self.elements + rest = self.rest + for idx, el in enumerate(elems): + el._codegen( + state, default_comma=rest is not None or idx < len(elems) - 1 + ) + + if rest is not None: + state.add_token("**") + self.whitespace_before_rest._codegen(state) + rest._codegen(state) + comma = self.trailing_comma + if comma is not None: + comma._codegen(state) + + self.rbrace._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchKeywordElement(CSTNode): + """ + A key=value pair in a :class:`MatchClass`. 
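+
+    For example, the ``x=0`` in ``case Point(x=0):``.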
+ """ + + key: Name + + #: The pattern to be matched against the attribute named ``key``. + pattern: MatchPattern + + #: An optional trailing comma. + comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + + #: Whitespace between ``key`` and the equals sign. + whitespace_before_equal: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + + #: Whitespace between the equals sign and ``pattern``. + whitespace_after_equal: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + + def _visit_and_replace_children( + self, visitor: CSTVisitorT + ) -> "MatchKeywordElement": + return MatchKeywordElement( + key=visit_required(self, "key", self.key, visitor), + whitespace_before_equal=visit_required( + self, "whitespace_before_equal", self.whitespace_before_equal, visitor + ), + whitespace_after_equal=visit_required( + self, "whitespace_after_equal", self.whitespace_after_equal, visitor + ), + pattern=visit_required(self, "pattern", self.pattern, visitor), + comma=visit_sentinel(self, "comma", self.comma, visitor), + ) + + def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: + with state.record_syntactic_position(self): + self.key._codegen(state) + self.whitespace_before_equal._codegen(state) + state.add_token("=") + self.whitespace_after_equal._codegen(state) + self.pattern._codegen(state) + comma = self.comma + if comma is MaybeSentinel.DEFAULT and default_comma: + state.add_token(", ") + elif isinstance(comma, Comma): + comma._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchClass(MatchPattern): + """ + A match class pattern. + """ + + #: An expression giving the nominal class to be matched. + cls: BaseExpression + + #: A sequence of patterns to be matched against the class defined sequence of + #: pattern matching attributes. + patterns: Sequence[MatchSequenceElement] = () + + #: A sequence of additional attribute names and corresponding patterns to be + #: matched. + kwds: Sequence[MatchKeywordElement] = () + + #: Whitespace between the class name and the left parenthesis. + whitespace_after_cls: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + + #: Whitespace between the left parenthesis and the first pattern. + whitespace_before_patterns: BaseParenthesizableWhitespace = SimpleWhitespace.field( + "" + ) + + #: Whitespace between the last pattern and the right parenthesis. 
+ whitespace_after_kwds: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + + #: Parenthesis at the beginning of the node + lpar: Sequence[LeftParen] = () + #: Parentheses after the pattern + rpar: Sequence[RightParen] = () + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchClass": + return MatchClass( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + cls=visit_required(self, "cls", self.cls, visitor), + whitespace_after_cls=visit_required( + self, "whitespace_after_cls", self.whitespace_after_cls, visitor + ), + whitespace_before_patterns=visit_required( + self, + "whitespace_before_patterns", + self.whitespace_before_patterns, + visitor, + ), + patterns=visit_sequence(self, "patterns", self.patterns, visitor), + kwds=visit_sequence(self, "kwds", self.kwds, visitor), + whitespace_after_kwds=visit_required( + self, "whitespace_after_kwds", self.whitespace_after_kwds, visitor + ), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + self.cls._codegen(state) + self.whitespace_after_cls._codegen(state) + state.add_token("(") + self.whitespace_before_patterns._codegen(state) + pats = self.patterns + kwds = self.kwds + for idx, pat in enumerate(pats): + pat._codegen(state, default_comma=idx + 1 < len(pats) + len(kwds)) + for idx, kwd in enumerate(kwds): + kwd._codegen(state, default_comma=idx + 1 < len(kwds)) + self.whitespace_after_kwds._codegen(state) + state.add_token(")") + + +@add_slots +@dataclass(frozen=True) +class MatchAs(MatchPattern): + """ + A match "as-pattern", capture pattern, or wildcard pattern. + """ + + #: The match pattern that the subject will be matched against. If this is ``None``, + #: the node represents a capture pattern (i.e. a bare name) and will always succeed. + pattern: Optional[MatchPattern] = None + + #: The name that will be bound if the pattern is successful. If this is ``None``, + #: ``pattern`` must also be ``None`` and the node represents the wildcard pattern + #: (i.e. ``_``). 
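+    #: For example, ``case _:`` is ``MatchAs()`` and ``case x:`` is
+    #: ``MatchAs(name=Name("x"))``.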
+ name: Optional[Name] = None + + #: Whitespace between ``pattern`` and the ``as`` keyword (if ``pattern`` is not + #: ``None``) + whitespace_before_as: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT + + #: Whitespace between the ``as`` keyword and ``name`` (if ``pattern`` is not + #: ``None``) + whitespace_after_as: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT + + #: Parenthesis at the beginning of the node + lpar: Sequence[LeftParen] = () + #: Parentheses after the pattern + rpar: Sequence[RightParen] = () + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchAs": + return MatchAs( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + pattern=visit_optional(self, "pattern", self.pattern, visitor), + whitespace_before_as=visit_sentinel( + self, "whitespace_before_as", self.whitespace_before_as, visitor + ), + whitespace_after_as=visit_sentinel( + self, "whitespace_after_as", self.whitespace_after_as, visitor + ), + name=visit_optional(self, "name", self.name, visitor), + rpar=visit_sequence(self, "rpar", self.rpar, visitor), + ) + + def _validate(self) -> None: + if self.name is None and self.pattern is not None: + raise CSTValidationError("Pattern must be None if name is None") + super(MatchAs, self)._validate() + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + pat = self.pattern + name = self.name + if pat is not None: + pat._codegen(state) + ws_before = self.whitespace_before_as + if ws_before is MaybeSentinel.DEFAULT: + state.add_token(" ") + elif isinstance(ws_before, BaseParenthesizableWhitespace): + ws_before._codegen(state) + state.add_token("as") + ws_after = self.whitespace_after_as + if ws_after is MaybeSentinel.DEFAULT: + state.add_token(" ") + elif isinstance(ws_after, BaseParenthesizableWhitespace): + ws_after._codegen(state) + if name is None: + state.add_token("_") + else: + name._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchOrElement(CSTNode): + """ + An element in a :class:`MatchOr` node. + """ + + pattern: MatchPattern + + #: An optional ``|`` separator. + separator: Union[BitOr, MaybeSentinel] = MaybeSentinel.DEFAULT + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchOrElement": + return MatchOrElement( + pattern=visit_required(self, "pattern", self.pattern, visitor), + separator=visit_sentinel(self, "separator", self.separator, visitor), + ) + + def _codegen_impl( + self, state: CodegenState, default_separator: bool = False + ) -> None: + with state.record_syntactic_position(self): + self.pattern._codegen(state) + sep = self.separator + if sep is MaybeSentinel.DEFAULT and default_separator: + state.add_token(" | ") + elif isinstance(sep, BitOr): + sep._codegen(state) + + +@add_slots +@dataclass(frozen=True) +class MatchOr(MatchPattern): + """ + A match "or-pattern". It matches each of its subpatterns in turn to the subject, + until one succeeds. The or-pattern is then deemed to succeed. If none of the + subpatterns succeed the or-pattern fails. + """ + + #: The subpatterns to be tried in turn. 
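+    #: For example, ``case None | False | True:`` consists of three
+    #: :class:`MatchOrElement` nodes, the first two carrying a ``|`` separator.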
+ patterns: Sequence[MatchOrElement] + + #: Parenthesis at the beginning of the node + lpar: Sequence[LeftParen] = () + #: Parentheses after the pattern + rpar: Sequence[RightParen] = () + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "MatchOr": + return MatchOr( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + patterns=visit_sequence(self, "patterns", self.patterns, visitor), + rpar=visit_sequence(self, "rpar", self.rpar, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + pats = self.patterns + for idx, pat in enumerate(pats): + pat._codegen(state, default_separator=idx + 1 < len(pats)) diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py new file mode 100644 index 00000000..ffa7b4b7 --- /dev/null +++ b/libcst/_nodes/tests/test_match.py @@ -0,0 +1,430 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Any + +import libcst as cst +from libcst import parse_statement +from libcst._nodes.tests.base import CSTNodeTest +from libcst._parser.entrypoints import is_native +from libcst.testing.utils import data_provider + +parser = parse_statement if is_native() else None + + +class MatchTest(CSTNodeTest): + # pyre-fixme[56]: Invalid decoration - Pyre was not able to infer the type + @data_provider( + ( + # Values and singletons + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( + pattern=cst.MatchSingleton(cst.Name("None")), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( + pattern=cst.MatchValue(cst.SimpleString('"foo"')), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": "match x:\n" + + " case None: pass\n" + + ' case "foo": pass\n', + "parser": parser, + }, + # List patterns + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( # empty list + pattern=cst.MatchList( + [], + lbracket=cst.LeftSquareBracket(), + rbracket=cst.RightSquareBracket(), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single element list + pattern=cst.MatchList( + [ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")) + ) + ], + lbracket=cst.LeftSquareBracket(), + rbracket=cst.RightSquareBracket(), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single element list with trailing comma + pattern=cst.MatchList( + [ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + cst.Comma(), + ) + ], + lbracket=cst.LeftSquareBracket(), + rbracket=cst.RightSquareBracket(), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": ( + "match x:\n" + + " case []: pass\n" + + " case [None]: pass\n" + + " case [None,]: pass\n" + ), + "parser": parser, + }, + # Tuple patterns + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( # empty tuple + pattern=cst.MatchTuple( + [], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # two element tuple + pattern=cst.MatchTuple( + [ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + cst.Comma(), + ), + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + ), + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single element tuple with trailing comma + pattern=cst.MatchTuple( + [ + cst.MatchSequenceElement( + 
cst.MatchSingleton(cst.Name("None")), + cst.Comma(), + ) + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # two element tuple + pattern=cst.MatchTuple( + [ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + cst.Comma(), + ), + cst.MatchStar( + comma=cst.Comma(), + ), + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + ), + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": ( + "match x:\n" + + " case (): pass\n" + + " case (None,None): pass\n" + + " case (None,): pass\n" + + " case (None,*_,None): pass\n" + ), + "parser": parser, + }, + # Mapping patterns + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( # empty mapping + pattern=cst.MatchMapping( + [], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # two element mapping + pattern=cst.MatchMapping( + [ + cst.MatchMappingElement( + key=cst.SimpleString('"a"'), + pattern=cst.MatchSingleton(cst.Name("None")), + comma=cst.Comma(), + ), + cst.MatchMappingElement( + key=cst.SimpleString('"b"'), + pattern=cst.MatchSingleton(cst.Name("None")), + ), + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single element mapping with trailing comma + pattern=cst.MatchMapping( + [ + cst.MatchMappingElement( + key=cst.SimpleString('"a"'), + pattern=cst.MatchSingleton(cst.Name("None")), + comma=cst.Comma(), + ) + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # rest + pattern=cst.MatchMapping( + rest=cst.Name("rest"), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": ( + "match x:\n" + + " case {}: pass\n" + + ' case {"a": None,"b": None}: pass\n' + + ' case {"a": None,}: pass\n' + + " case {**rest}: pass\n" + ), + "parser": parser, + }, + # Class patterns + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( # empty class + pattern=cst.MatchClass( + cls=cst.Attribute(cst.Name("a"), cst.Name("b")), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single pattern class + pattern=cst.MatchClass( + cls=cst.Attribute(cst.Name("a"), cst.Name("b")), + patterns=[ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")) + ) + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single pattern class with trailing comma + pattern=cst.MatchClass( + cls=cst.Attribute(cst.Name("a"), cst.Name("b")), + patterns=[ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + comma=cst.Comma(), + ) + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single keyword pattern class + pattern=cst.MatchClass( + cls=cst.Attribute(cst.Name("a"), cst.Name("b")), + kwds=[ + cst.MatchKeywordElement( + key=cst.Name("foo"), + pattern=cst.MatchSingleton(cst.Name("None")), + ) + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # single keyword pattern class with trailing comma + pattern=cst.MatchClass( + cls=cst.Attribute(cst.Name("a"), cst.Name("b")), + kwds=[ + cst.MatchKeywordElement( + key=cst.Name("foo"), + pattern=cst.MatchSingleton(cst.Name("None")), + comma=cst.Comma(), + ) + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( # now all at once + pattern=cst.MatchClass( + cls=cst.Attribute(cst.Name("a"), cst.Name("b")), + patterns=[ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")), + cst.Comma(), + ), + cst.MatchSequenceElement( + 
cst.MatchSingleton(cst.Name("None")), + cst.Comma(), + ), + ], + kwds=[ + cst.MatchKeywordElement( + key=cst.Name("foo"), + pattern=cst.MatchSingleton(cst.Name("None")), + comma=cst.Comma(), + ), + cst.MatchKeywordElement( + key=cst.Name("bar"), + pattern=cst.MatchSingleton(cst.Name("None")), + comma=cst.Comma(), + ), + ], + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": ( + "match x:\n" + + " case a.b(): pass\n" + + " case a.b(None): pass\n" + + " case a.b(None,): pass\n" + + " case a.b(foo=None): pass\n" + + " case a.b(foo=None,): pass\n" + + " case a.b(None,None,foo=None,bar=None,): pass\n" + ), + "parser": parser, + }, + # as pattern + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( + pattern=cst.MatchAs(), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( + pattern=cst.MatchAs(name=cst.Name("foo")), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( + pattern=cst.MatchAs( + pattern=cst.MatchSingleton(cst.Name("None")), + name=cst.Name("bar"), + whitespace_before_as=cst.SimpleWhitespace(" "), + whitespace_after_as=cst.SimpleWhitespace(" "), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": "match x:\n" + + " case _: pass\n" + + " case foo: pass\n" + + " case None as bar: pass\n", + "parser": parser, + }, + # or pattern + { + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( + pattern=cst.MatchOr( + [ + cst.MatchOrElement( + cst.MatchSingleton(cst.Name("None")), + cst.BitOr(), + ), + cst.MatchOrElement( + cst.MatchSingleton(cst.Name("False")), + cst.BitOr(), + ), + cst.MatchOrElement( + cst.MatchSingleton(cst.Name("True")) + ), + ] + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ) + ], + ), + "code": "match x:\n case None | False | True: pass\n", + "parser": parser, + }, + { # exercise sentinels + "node": cst.Match( + subject=cst.Name("x"), + cases=[ + cst.MatchCase( + pattern=cst.MatchList( + [cst.MatchStar(), cst.MatchStar()], + lbracket=None, + rbracket=None, + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( + pattern=cst.MatchTuple( + [ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")) + ) + ] + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( + pattern=cst.MatchAs( + pattern=cst.MatchTuple( + [ + cst.MatchSequenceElement( + cst.MatchSingleton(cst.Name("None")) + ) + ] + ), + name=cst.Name("bar"), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + cst.MatchCase( + pattern=cst.MatchOr( + [ + cst.MatchOrElement( + cst.MatchSingleton(cst.Name("None")), + ), + cst.MatchOrElement( + cst.MatchSingleton(cst.Name("False")), + ), + cst.MatchOrElement( + cst.MatchSingleton(cst.Name("True")) + ), + ] + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + ], + ), + "code": "match x:\n" + + " case *_, *_: pass\n" + + " case (None,): pass\n" + + " case (None,) as bar: pass\n" + + " case None | False | True: pass\n", + "parser": None, + }, + ) + ) + def test_valid(self, **kwargs: Any) -> None: + self.validate_node(**kwargs) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 0cc05ff4..cea085d0 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -159,6 +159,23 @@ if TYPE_CHECKING: ImportAlias, ImportFrom, IndentedBlock, + Match, + MatchAs, + MatchCase, + MatchClass, + MatchKeywordElement, + MatchList, + MatchMapping, + MatchMappingElement, + MatchOr, + MatchOrElement, + MatchPattern, + MatchSequence, + MatchSequenceElement, + 
MatchSingleton, + MatchStar, + MatchTuple, + MatchValue, NameItem, Nonlocal, Pass, @@ -3134,6 +3151,636 @@ class CSTTypedBaseFunctions: def leave_ListComp_rpar(self, node: "ListComp") -> None: pass + @mark_no_op + def visit_Match(self, node: "Match") -> Optional[bool]: + pass + + @mark_no_op + def visit_Match_subject(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_subject(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_cases(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_cases(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_leading_lines(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_leading_lines(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_after_match(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_after_match(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_before_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_before_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_after_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_after_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_indent(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_indent(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_footer(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_footer(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_MatchAs(self, node: "MatchAs") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchAs_pattern(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_pattern(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_name(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_name(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_lpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_lpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_rpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_rpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchCase(self, node: "MatchCase") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchCase_pattern(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_pattern(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_body(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_body(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_guard(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_guard(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_leading_lines(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_leading_lines(self, node: "MatchCase") -> None: + pass + + @mark_no_op + 
def visit_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchClass(self, node: "MatchClass") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchClass_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_lpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_lpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_rpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_rpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement(self, node: "MatchKeywordElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_whitespace_before_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_whitespace_before_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_whitespace_after_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def 
leave_MatchKeywordElement_whitespace_after_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchList(self, node: "MatchList") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchList_patterns(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_patterns(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_lbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_lbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_rbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_rbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_lpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_lpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_rpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_rpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchMapping(self, node: "MatchMapping") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchMapping_elements(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_elements(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_lbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_lbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_lpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_lpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement(self, node: "MatchMappingElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_whitespace_before_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + 
def leave_MatchMappingElement_whitespace_before_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_whitespace_after_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_whitespace_after_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchOr(self, node: "MatchOr") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchOr_patterns(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_patterns(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOr_lpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_lpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOr_rpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_rpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOrElement(self, node: "MatchOrElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def visit_MatchOrElement_separator(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchOrElement_separator(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def visit_MatchPattern(self, node: "MatchPattern") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequence(self, node: "MatchSequence") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequenceElement( + self, node: "MatchSequenceElement" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def visit_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def visit_MatchSingleton(self, node: "MatchSingleton") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSingleton_value(self, node: "MatchSingleton") -> None: + pass + + @mark_no_op + def leave_MatchSingleton_value(self, node: "MatchSingleton") -> None: + pass + + @mark_no_op + def visit_MatchStar(self, node: "MatchStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchStar_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchStar_comma(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_comma(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchTuple(self, node: "MatchTuple") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchTuple_patterns(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_patterns(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchTuple_lpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_lpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def 
visit_MatchTuple_rpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_rpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchValue(self, node: "MatchValue") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchValue_value(self, node: "MatchValue") -> None: + pass + + @mark_no_op + def leave_MatchValue_value(self, node: "MatchValue") -> None: + pass + @mark_no_op def visit_MatrixMultiply(self, node: "MatrixMultiply") -> Optional[bool]: pass @@ -5172,6 +5819,74 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_ListComp(self, original_node: "ListComp") -> None: pass + @mark_no_op + def leave_Match(self, original_node: "Match") -> None: + pass + + @mark_no_op + def leave_MatchAs(self, original_node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchCase(self, original_node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchClass(self, original_node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement(self, original_node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchList(self, original_node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchMapping(self, original_node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement(self, original_node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchOr(self, original_node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOrElement(self, original_node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchPattern(self, original_node: "MatchPattern") -> None: + pass + + @mark_no_op + def leave_MatchSequence(self, original_node: "MatchSequence") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement(self, original_node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSingleton(self, original_node: "MatchSingleton") -> None: + pass + + @mark_no_op + def leave_MatchStar(self, original_node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchTuple(self, original_node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchValue(self, original_node: "MatchValue") -> None: + pass + @mark_no_op def leave_MatrixMultiply(self, original_node: "MatrixMultiply") -> None: pass @@ -5521,7 +6236,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_BitOr( self, original_node: "BitOr", updated_node: "BitOr" - ) -> "BaseBinaryOp": + ) -> Union["BaseBinaryOp", MaybeSentinel]: return updated_node @mark_no_op @@ -5956,6 +6671,116 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> "BaseExpression": return updated_node + @mark_no_op + def leave_Match( + self, original_node: "Match", updated_node: "Match" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_MatchAs( + self, original_node: "MatchAs", updated_node: "MatchAs" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchCase( + self, original_node: "MatchCase", updated_node: "MatchCase" + ) -> "MatchCase": + return updated_node + + @mark_no_op + def leave_MatchClass( + self, original_node: "MatchClass", updated_node: "MatchClass" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchKeywordElement( + self, original_node: "MatchKeywordElement", updated_node: "MatchKeywordElement" + ) -> Union[ + "MatchKeywordElement", FlattenSentinel["MatchKeywordElement"], 
RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_MatchList( + self, original_node: "MatchList", updated_node: "MatchList" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchMapping( + self, original_node: "MatchMapping", updated_node: "MatchMapping" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchMappingElement( + self, original_node: "MatchMappingElement", updated_node: "MatchMappingElement" + ) -> Union[ + "MatchMappingElement", FlattenSentinel["MatchMappingElement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_MatchOr( + self, original_node: "MatchOr", updated_node: "MatchOr" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchOrElement( + self, original_node: "MatchOrElement", updated_node: "MatchOrElement" + ) -> Union["MatchOrElement", FlattenSentinel["MatchOrElement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_MatchPattern( + self, original_node: "MatchPattern", updated_node: "MatchPattern" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchSequence( + self, original_node: "MatchSequence", updated_node: "MatchSequence" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchSequenceElement( + self, + original_node: "MatchSequenceElement", + updated_node: "MatchSequenceElement", + ) -> Union[ + "MatchSequenceElement", FlattenSentinel["MatchSequenceElement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_MatchSingleton( + self, original_node: "MatchSingleton", updated_node: "MatchSingleton" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchStar( + self, original_node: "MatchStar", updated_node: "MatchStar" + ) -> "MatchStar": + return updated_node + + @mark_no_op + def leave_MatchTuple( + self, original_node: "MatchTuple", updated_node: "MatchTuple" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchValue( + self, original_node: "MatchValue", updated_node: "MatchValue" + ) -> "MatchPattern": + return updated_node + @mark_no_op def leave_MatrixMultiply( self, original_node: "MatrixMultiply", updated_node: "MatrixMultiply" diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index cd239a92..655bc947 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -8398,6 +8398,2368 @@ class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNo ] = DoNotCare() +MatchCaseMatchType = Union["MatchCase", MetadataMatchType, MatchIfTrue[cst.MatchCase]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Match(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + subject: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + cases: Union[ + Sequence[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchCase]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + 
OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchCase]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchCase]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_match: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + indent: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + 
], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchAs(BaseMatcherNode): + pattern: Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + ] + ], + AllOf[ + Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + ] + ], + ] = DoNotCare() + name: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + whitespace_before_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + 
MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchPatternMatchType = Union[ + "MatchPattern", MetadataMatchType, MatchIfTrue[cst.MatchPattern] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchCase(BaseMatcherNode): + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + guard: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_case: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_if: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_if: Union[ + 
SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchSequenceElementMatchType = Union[ + "MatchSequenceElement", MetadataMatchType, MatchIfTrue[cst.MatchSequenceElement] +] +MatchKeywordElementMatchType = Union[ + "MatchKeywordElement", MetadataMatchType, MatchIfTrue[cst.MatchKeywordElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchClass(BaseMatcherNode): + cls: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + ] + ], + ] = DoNotCare() + kwds: Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] 
+ ], + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + ] + ], + ] = DoNotCare() + whitespace_after_cls: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_patterns: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_kwds: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + 
OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchKeywordElement(BaseMatcherNode): + key: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_equal: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equal: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchSequenceElementOrMatchStarMatchType = Union[ + "MatchSequenceElement", + "MatchStar", + MetadataMatchType, + MatchIfTrue[Union[cst.MatchSequenceElement, cst.MatchStar]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchList(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + 
OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + ] = DoNotCare() + lbracket: Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + ] + ], + AllOf[ + Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + ] + ], + ] = DoNotCare() + rbracket: Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + ] + ], + AllOf[ + Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + 
Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchMappingElementMatchType = Union[ + "MatchMappingElement", MetadataMatchType, MatchIfTrue[cst.MatchMappingElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchMapping(BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchMappingElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchMappingElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchMappingElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + rest: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + whitespace_before_rest: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_comma: Union[ + Optional["Comma"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comma]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] + ] + ], + AllOf[ + Union[ + Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + 
DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchMappingElement(BaseMatcherNode): + key: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + 
AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchOrElementMatchType = Union[ + "MatchOrElement", MetadataMatchType, MatchIfTrue[cst.MatchOrElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchOr(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchOrElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchOrElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchOrElement]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + 
MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BitOrMatchType = Union["BitOr", MetadataMatchType, MatchIfTrue[cst.BitOr]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchOrElement(BaseMatcherNode): + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + separator: Union[ + BitOrMatchType, DoNotCareSentinel, OneOf[BitOrMatchType], AllOf[BitOrMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchPattern(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSequence(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSequenceElement(BaseMatcherNode): + value: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSingleton(BaseMatcherNode): + value: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchStar(BaseMatcherNode): + name: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_name: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + 
OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchTuple(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + 
MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchValue(BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + @dataclass(frozen=True, eq=False, unsafe_hash=False) class MatrixMultiply(BaseBinaryOp, BaseMatcherNode): whitespace_before: Union[ @@ -13417,11 +15779,28 @@ __all__ = [ "LessThanEqual", "List", "ListComp", + "Match", + "MatchAs", + "MatchCase", + "MatchClass", "MatchDecoratorMismatch", "MatchIfTrue", + "MatchKeywordElement", + "MatchList", + "MatchMapping", + "MatchMappingElement", "MatchMetadata", "MatchMetadataIfTrue", + "MatchOr", + "MatchOrElement", + "MatchPattern", "MatchRegex", + "MatchSequence", + "MatchSequenceElement", + "MatchSingleton", + "MatchStar", + "MatchTuple", + "MatchValue", "MatcherDecoratableTransformer", "MatcherDecoratableVisitor", "MatrixMultiply", diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index e10f47cf..bc8e9042 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -155,6 +155,23 @@ from libcst._nodes.statement import ( ImportAlias, ImportFrom, IndentedBlock, + Match, + MatchAs, + MatchCase, + MatchClass, + MatchKeywordElement, + MatchList, + MatchMapping, + MatchMappingElement, + MatchOr, + MatchOrElement, + MatchPattern, + MatchSequence, + MatchSequenceElement, + MatchSingleton, + MatchStar, + MatchTuple, + MatchValue, NameItem, Nonlocal, Pass, 
@@ -200,7 +217,7 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { BitAnd: BaseBinaryOp, BitAndAssign: BaseAugOp, BitInvert: BaseUnaryOp, - BitOr: BaseBinaryOp, + BitOr: Union[BaseBinaryOp, MaybeSentinel], BitOrAssign: BaseAugOp, BitXor: BaseBinaryOp, BitXorAssign: BaseAugOp, @@ -270,6 +287,23 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { LessThanEqual: BaseCompOp, List: BaseExpression, ListComp: BaseExpression, + Match: Union[BaseStatement, RemovalSentinel], + MatchAs: MatchPattern, + MatchCase: MatchCase, + MatchClass: MatchPattern, + MatchKeywordElement: Union[MatchKeywordElement, RemovalSentinel], + MatchList: MatchPattern, + MatchMapping: MatchPattern, + MatchMappingElement: Union[MatchMappingElement, RemovalSentinel], + MatchOr: MatchPattern, + MatchOrElement: Union[MatchOrElement, RemovalSentinel], + MatchPattern: MatchPattern, + MatchSequence: MatchPattern, + MatchSequenceElement: Union[MatchSequenceElement, RemovalSentinel], + MatchSingleton: MatchPattern, + MatchStar: MatchStar, + MatchTuple: MatchPattern, + MatchValue: MatchPattern, MatrixMultiply: BaseBinaryOp, MatrixMultiplyAssign: BaseAugOp, Minus: BaseUnaryOp, diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index d30e5549..b6be09df 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -13,8 +13,11 @@ pub use statement::{ AnnAssign, Annotation, AsName, Assert, Assign, AssignTarget, AssignTargetExpression, AugAssign, Break, ClassDef, CompoundStatement, Continue, Decorator, Del, DelTargetExpression, Else, ExceptHandler, ExceptStarHandler, Expr, Finally, For, FunctionDef, Global, If, Import, - ImportAlias, ImportFrom, ImportNames, IndentedBlock, NameItem, Nonlocal, OrElse, Pass, Raise, - Return, SimpleStatementLine, SimpleStatementSuite, SmallStatement, Statement, Suite, Try, + ImportAlias, ImportFrom, ImportNames, IndentedBlock, Match, MatchAs, MatchCase, MatchClass, + MatchKeywordElement, MatchList, MatchMapping, MatchMappingElement, MatchOr, MatchOrElement, + MatchPattern, MatchSequence, MatchSequenceElement, MatchSingleton, MatchStar, MatchTuple, + MatchValue, NameItem, Nonlocal, OrElse, Pass, Raise, Return, SimpleStatementLine, + SimpleStatementSuite, SmallStatement, StarrableMatchSequenceElement, Statement, Suite, Try, TryStar, While, With, WithItem, }; @@ -32,8 +35,8 @@ pub use expression::{ mod op; pub use op::{ - AssignEqual, AugOp, BinaryOp, BooleanOp, Colon, Comma, CompOp, Dot, ImportStar, Semicolon, - UnaryOp, + AssignEqual, AugOp, BinaryOp, BitOr, BooleanOp, Colon, Comma, CompOp, Dot, ImportStar, + Semicolon, UnaryOp, }; mod module; diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index fa031d4c..48b9839f 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -1418,3 +1418,33 @@ impl<'a> Codegen<'a> for AugOp<'a> { aft.codegen(state); } } + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct BitOr<'a> { + pub whitespace_before: ParenthesizableWhitespace<'a>, + pub whitespace_after: ParenthesizableWhitespace<'a>, + + pub(crate) tok: TokenRef<'a>, +} + +impl<'a> Inflate<'a> for BitOr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(self) + } +} + +impl<'a> Codegen<'a> for BitOr<'a> { +
fn codegen(&self, state: &mut CodegenState<'a>) { + self.whitespace_before.codegen(state); + state.add_token("|"); + self.whitespace_after.codegen(state); + } +} diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 7f717353..7d7da873 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -14,7 +14,7 @@ use super::{ use crate::{ nodes::{ traits::{Inflate, Result, WithComma, WithLeadingLines}, - Arg, AssignEqual, Asynchronous, AugOp, Element, ParenthesizedNode, + Arg, AssignEqual, Asynchronous, AugOp, BitOr, Element, ParenthesizedNode, }, tokenizer::{ whitespace_parser::{ @@ -23,6 +23,7 @@ use crate::{ }, Token, }, + LeftCurlyBrace, LeftSquareBracket, RightCurlyBrace, RightSquareBracket, }; use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode}; @@ -55,6 +56,7 @@ pub enum CompoundStatement<'a> { Try(Try<'a>), TryStar(TryStar<'a>), With(With<'a>), + Match(Match<'a>), } impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { @@ -68,6 +70,7 @@ impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { Self::Try(t) => &mut t.leading_lines, Self::TryStar(t) => &mut t.leading_lines, Self::With(w) => &mut w.leading_lines, + Self::Match(m) => &mut m.leading_lines, } } } @@ -2100,3 +2103,796 @@ impl<'a> Del<'a> { Self { semicolon, ..self } } } + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct Match<'a> { + pub subject: Expression<'a>, + pub cases: Vec<MatchCase<'a>>, + + pub leading_lines: Vec<EmptyLine<'a>>, + pub whitespace_after_match: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + pub whitespace_after_colon: TrailingWhitespace<'a>, + pub indent: Option<&'a str>, + pub footer: Vec<EmptyLine<'a>>, + + pub(crate) match_tok: TokenRef<'a>, + pub(crate) colon_tok: TokenRef<'a>, + pub(crate) indent_tok: TokenRef<'a>, + pub(crate) dedent_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for Match<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for l in &self.leading_lines { + l.codegen(state); + } + state.add_indent(); + state.add_token("match"); + self.whitespace_after_match.codegen(state); + self.subject.codegen(state); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.whitespace_after_colon.codegen(state); + + let indent = self.indent.unwrap_or(state.default_indent); + state.indent(indent); + + // Note: empty cases is a syntax error + for c in &self.cases { + c.codegen(state); + } + + for f in &self.footer { + f.codegen(state); + } + state.dedent(); + } +} + +impl<'a> Inflate<'a> for Match<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.leading_lines = parse_empty_lines( + config, + &mut self.match_tok.whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_after_match = + parse_simple_whitespace(config, &mut self.match_tok.whitespace_after.borrow_mut())?; + self.subject = self.subject.inflate(config)?; + self.whitespace_before_colon = + parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())?; + self.whitespace_after_colon = + parse_trailing_whitespace(config, &mut self.colon_tok.whitespace_after.borrow_mut())?; + self.indent = self.indent_tok.relative_indent; + if self.indent == Some(config.default_indent) { + self.indent = None; + } + self.cases = self.cases.inflate(config)?; + // See note about footers in `IndentedBlock`'s inflate fn + self.footer = parse_empty_lines( + config, + &mut self.dedent_tok.whitespace_after.borrow_mut(), + Some(self.indent_tok.whitespace_before.borrow().absolute_indent), + )?; + Ok(self)
+ } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchCase<'a> { + pub pattern: MatchPattern<'a>, + pub guard: Option<Expression<'a>>, + pub body: Suite<'a>, + + pub leading_lines: Vec<EmptyLine<'a>>, + pub whitespace_after_case: SimpleWhitespace<'a>, + pub whitespace_before_if: SimpleWhitespace<'a>, + pub whitespace_after_if: SimpleWhitespace<'a>, + pub whitespace_before_colon: SimpleWhitespace<'a>, + + pub(crate) case_tok: TokenRef<'a>, + pub(crate) if_tok: Option<TokenRef<'a>>, + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for MatchCase<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + for l in &self.leading_lines { + l.codegen(state); + } + state.add_indent(); + state.add_token("case"); + self.whitespace_after_case.codegen(state); + self.pattern.codegen(state); + if let Some(guard) = &self.guard { + self.whitespace_before_if.codegen(state); + state.add_token("if"); + self.whitespace_after_if.codegen(state); + guard.codegen(state); + } + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.body.codegen(state); + } +} + +impl<'a> Inflate<'a> for MatchCase<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.leading_lines = parse_empty_lines( + config, + &mut self.case_tok.whitespace_before.borrow_mut(), + None, + )?; + self.whitespace_after_case = + parse_simple_whitespace(config, &mut self.case_tok.whitespace_after.borrow_mut())?; + self.pattern = self.pattern.inflate(config)?; + if let Some(if_tok) = self.if_tok.as_mut() { + self.whitespace_before_if = + parse_simple_whitespace(config, &mut if_tok.whitespace_before.borrow_mut())?; + self.whitespace_after_if = + parse_simple_whitespace(config, &mut if_tok.whitespace_after.borrow_mut())?; + + self.guard = self.guard.inflate(config)?; + } + self.whitespace_before_colon = + parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())?; + self.body = self.body.inflate(config)?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, Codegen, Inflate, ParenthesizedNode)] +pub enum MatchPattern<'a> { + Value(MatchValue<'a>), + Singleton(MatchSingleton<'a>), + Sequence(MatchSequence<'a>), + Mapping(MatchMapping<'a>), + Class(MatchClass<'a>), + As(Box<MatchAs<'a>>), + Or(Box<MatchOr<'a>>), +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchValue<'a> { + pub value: Expression<'a>, +} + +impl<'a> ParenthesizedNode<'a> for MatchValue<'a> { + fn lpar(&self) -> &Vec<LeftParen<'a>> { + self.value.lpar() + } + fn rpar(&self) -> &Vec<RightParen<'a>> { + self.value.rpar() + } + fn parenthesize<F>(&self, state: &mut CodegenState<'a>, f: F) + where + F: FnOnce(&mut CodegenState<'a>), + { + self.value.parenthesize(state, f) + } + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + Self { + value: self.value.with_parens(left, right), + } + } +} + +impl<'a> Codegen<'a> for MatchValue<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.value.codegen(state) + } +} + +impl<'a> Inflate<'a> for MatchValue<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.value = self.value.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchSingleton<'a> { + pub value: Name<'a>, +} + +impl<'a> ParenthesizedNode<'a> for MatchSingleton<'a> { + fn lpar(&self) -> &Vec<LeftParen<'a>> { + self.value.lpar() + } + fn rpar(&self) -> &Vec<RightParen<'a>> { + self.value.rpar() + } + fn parenthesize<F>(&self, state: &mut CodegenState<'a>, f: F) + where + F: FnOnce(&mut CodegenState<'a>), + { + self.value.parenthesize(state, f) + } + fn
with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + Self { + value: self.value.with_parens(left, right), + } + } +} + +impl<'a> Codegen<'a> for MatchSingleton<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.value.codegen(state) + } +} + +impl<'a> Inflate<'a> for MatchSingleton<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.value = self.value.inflate(config)?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, Codegen, Inflate, ParenthesizedNode)] +pub enum MatchSequence<'a> { + MatchList(MatchList<'a>), + MatchTuple(MatchTuple<'a>), +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +pub struct MatchList<'a> { + pub patterns: Vec<StarrableMatchSequenceElement<'a>>, + pub lbracket: Option<LeftSquareBracket<'a>>, + pub rbracket: Option<RightSquareBracket<'a>>, + pub lpar: Vec<LeftParen<'a>>, + pub rpar: Vec<RightParen<'a>>, +} + +impl<'a> Codegen<'a> for MatchList<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbracket.codegen(state); + let len = self.patterns.len(); + if len == 1 { + self.patterns.first().unwrap().codegen(state, false, false); + } else { + for (idx, pat) in self.patterns.iter().enumerate() { + pat.codegen(state, idx < len - 1, true); + } + } + self.rbracket.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for MatchList<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.lpar = self.lpar.inflate(config)?; + self.lbracket = self.lbracket.inflate(config)?; + + let len = self.patterns.len(); + self.patterns = self + .patterns + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) + .collect::<Result<Vec<_>>>()?; + + self.rbracket = self.rbracket.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +pub struct MatchTuple<'a> { + pub patterns: Vec<StarrableMatchSequenceElement<'a>>, + pub lpar: Vec<LeftParen<'a>>, + pub rpar: Vec<RightParen<'a>>, +} + +impl<'a> Codegen<'a> for MatchTuple<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + let len = self.patterns.len(); + if len == 1 { + self.patterns.first().unwrap().codegen(state, true, false); + } else { + for (idx, pat) in self.patterns.iter().enumerate() { + pat.codegen(state, idx < len - 1, true); + } + } + }) + } +} + +impl<'a> Inflate<'a> for MatchTuple<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.lpar = self.lpar.inflate(config)?; + let len = self.patterns.len(); + self.patterns = self + .patterns + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) + .collect::<Result<Vec<_>>>()?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub enum StarrableMatchSequenceElement<'a> { + Simple(MatchSequenceElement<'a>), + Starred(MatchStar<'a>), +} + +impl<'a> StarrableMatchSequenceElement<'a> { + fn codegen( + &self, + state: &mut CodegenState<'a>, + default_comma: bool, + default_comma_whitespace: bool, + ) { + match &self { + Self::Simple(s) => s.codegen(state, default_comma, default_comma_whitespace), + Self::Starred(s) => s.codegen(state, default_comma, default_comma_whitespace), + } + } + fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result<Self> { + Ok(match self { + Self::Simple(s) => Self::Simple(s.inflate_element(config, last_element)?), + Self::Starred(s) => Self::Starred(s.inflate_element(config, last_element)?), + }) + } +} + +impl<'a> WithComma<'a> for
StarrableMatchSequenceElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + match self { + Self::Simple(s) => Self::Simple(s.with_comma(comma)), + Self::Starred(s) => Self::Starred(s.with_comma(comma)), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchSequenceElement<'a> { + pub value: MatchPattern<'a>, + pub comma: Option<Comma<'a>>, +} + +impl<'a> MatchSequenceElement<'a> { + fn codegen( + &self, + state: &mut CodegenState<'a>, + default_comma: bool, + default_comma_whitespace: bool, + ) { + self.value.codegen(state); + self.comma.codegen(state); + if self.comma.is_none() && default_comma { + state.add_token(if default_comma_whitespace { ", " } else { "," }); + } + } + + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result<Self> { + self.value = self.value.inflate(config)?; + self.comma = if last_element { + self.comma.map(|c| c.inflate_before(config)).transpose() + } else { + self.comma.inflate(config) + }?; + Ok(self) + } +} + +impl<'a> WithComma<'a> for MatchSequenceElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + Self { + comma: Some(comma), + ..self + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchStar<'a> { + pub name: Option<Name<'a>>, + pub comma: Option<Comma<'a>>, + pub whitespace_before_name: ParenthesizableWhitespace<'a>, + + pub(crate) star_tok: TokenRef<'a>, +} + +impl<'a> MatchStar<'a> { + fn codegen( + &self, + state: &mut CodegenState<'a>, + default_comma: bool, + default_comma_whitespace: bool, + ) { + state.add_token("*"); + self.whitespace_before_name.codegen(state); + if let Some(name) = &self.name { + name.codegen(state); + } else { + state.add_token("_"); + } + self.comma.codegen(state); + if self.comma.is_none() && default_comma { + state.add_token(if default_comma_whitespace { ", " } else { "," }); + } + } + + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result<Self> { + self.whitespace_before_name = parse_parenthesizable_whitespace( + config, + &mut self.star_tok.whitespace_after.borrow_mut(), + )?; + self.name = self.name.inflate(config)?; + self.comma = if last_element { + self.comma.map(|c| c.inflate_before(config)).transpose() + } else { + self.comma.inflate(config) + }?; + Ok(self) + } +} + +impl<'a> WithComma<'a> for MatchStar<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + Self { + comma: Some(comma), + ..self + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +pub struct MatchMapping<'a> { + pub elements: Vec<MatchMappingElement<'a>>, + pub rest: Option<Name<'a>>, + pub trailing_comma: Option<Comma<'a>>, + pub lbrace: LeftCurlyBrace<'a>, + pub rbrace: RightCurlyBrace<'a>, + pub lpar: Vec<LeftParen<'a>>, + pub rpar: Vec<RightParen<'a>>, + + pub whitespace_before_rest: SimpleWhitespace<'a>, + + pub(crate) star_tok: Option<TokenRef<'a>>, +} + +impl<'a> Codegen<'a> for MatchMapping<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.lbrace.codegen(state); + let len = self.elements.len(); + for (idx, el) in self.elements.iter().enumerate() { + el.codegen(state, self.rest.is_some() || idx < len - 1); + } + if let Some(rest) = &self.rest { + state.add_token("**"); + self.whitespace_before_rest.codegen(state); + rest.codegen(state); + self.trailing_comma.codegen(state); + } + self.rbrace.codegen(state); + }) + } +} + +impl<'a> Inflate<'a> for MatchMapping<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result<Self> { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; + + let len = self.elements.len(); + let no_star =
self.star_tok.is_none(); + self.elements = self + .elements + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_element(config, no_star && idx + 1 == len)) + .collect::>>()?; + + if let Some(star_tok) = self.star_tok.as_mut() { + self.whitespace_before_rest = + parse_simple_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())?; + self.rest = self.rest.inflate(config)?; + self.trailing_comma = self + .trailing_comma + .map(|c| c.inflate_before(config)) + .transpose()?; + } + + self.rbrace = self.rbrace.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchMappingElement<'a> { + pub key: Expression<'a>, + pub pattern: MatchPattern<'a>, + pub comma: Option>, + + pub whitespace_before_colon: ParenthesizableWhitespace<'a>, + pub whitespace_after_colon: ParenthesizableWhitespace<'a>, + + pub(crate) colon_tok: TokenRef<'a>, +} + +impl<'a> MatchMappingElement<'a> { + fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { + self.key.codegen(state); + self.whitespace_before_colon.codegen(state); + state.add_token(":"); + self.whitespace_after_colon.codegen(state); + self.pattern.codegen(state); + self.comma.codegen(state); + if self.comma.is_none() && default_comma { + state.add_token(", "); + } + } + + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.key = self.key.inflate(config)?; + self.whitespace_before_colon = parse_parenthesizable_whitespace( + config, + &mut self.colon_tok.whitespace_before.borrow_mut(), + )?; + self.whitespace_after_colon = parse_parenthesizable_whitespace( + config, + &mut self.colon_tok.whitespace_after.borrow_mut(), + )?; + self.pattern = self.pattern.inflate(config)?; + self.comma = if last_element { + self.comma.map(|c| c.inflate_before(config)).transpose() + } else { + self.comma.inflate(config) + }?; + Ok(self) + } +} + +impl<'a> WithComma<'a> for MatchMappingElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + Self { + comma: Some(comma), + ..self + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +pub struct MatchClass<'a> { + pub cls: NameOrAttribute<'a>, + pub patterns: Vec>, + pub kwds: Vec>, + pub lpar: Vec>, + pub rpar: Vec>, + + pub whitespace_after_cls: ParenthesizableWhitespace<'a>, + pub whitespace_before_patterns: ParenthesizableWhitespace<'a>, + pub whitespace_after_kwds: ParenthesizableWhitespace<'a>, + + pub(crate) lpar_tok: TokenRef<'a>, + pub(crate) rpar_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for MatchClass<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + self.cls.codegen(state); + self.whitespace_after_cls.codegen(state); + state.add_token("("); + self.whitespace_before_patterns.codegen(state); + let patlen = self.patterns.len(); + let kwdlen = self.kwds.len(); + for (idx, pat) in self.patterns.iter().enumerate() { + pat.codegen(state, idx < patlen - 1 + kwdlen, patlen == 1 && kwdlen == 0); + } + for (idx, kwd) in self.kwds.iter().enumerate() { + kwd.codegen(state, idx < kwdlen - 1); + } + self.whitespace_after_kwds.codegen(state); + state.add_token(")"); + }) + } +} + +impl<'a> Inflate<'a> for MatchClass<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + + self.cls = self.cls.inflate(config)?; + self.whitespace_after_cls = parse_parenthesizable_whitespace( + config, + &mut self.lpar_tok.whitespace_before.borrow_mut(), + )?; + 
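For orientation, the sequence, star, mapping, and class nodes handled above correspond to the following Python 3.10 source forms. This is a plain-Python sketch (the `Point` dataclass and the literal values are made up for illustration; no LibCST API is involved):

```
from dataclasses import dataclass


@dataclass
class Point:
    x: int
    y: int
    label: str = ""


def describe(value):
    match value:
        # MatchList with a MatchStar element; every element but the last carries
        # a comma, which is what the default_comma flag in codegen reproduces.
        case [first, *rest]:
            return f"sequence starting with {first!r} (+{len(rest)})"
        # MatchMapping: keyed sub-patterns plus a "**rest" double-star capture.
        case {"x": x, "y": y, **rest}:
            return f"mapping ({x}, {y}) with {len(rest)} extra keys"
        # MatchClass: positional sub-patterns (via __match_args__), then keywords.
        case Point(0, 0, label=label):
            return f"origin labelled {label!r}"
        case _:
            return "something else"


assert describe([1, 2, 3]).startswith("sequence")
assert describe({"x": 1, "y": 2, "z": 3}) == "mapping (1, 2) with 1 extra keys"
assert describe(Point(0, 0, label="home")) == "origin labelled 'home'"
```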
self.whitespace_before_patterns = parse_parenthesizable_whitespace( + config, + &mut self.lpar_tok.whitespace_after.borrow_mut(), + )?; + + let patlen = self.patterns.len(); + let kwdlen = self.kwds.len(); + self.patterns = self + .patterns + .into_iter() + .enumerate() + .map(|(idx, pat)| pat.inflate_element(config, idx + 1 == patlen + kwdlen)) + .collect::>()?; + self.kwds = self + .kwds + .into_iter() + .enumerate() + .map(|(idx, kwd)| kwd.inflate_element(config, idx + 1 == kwdlen)) + .collect::>()?; + + self.whitespace_after_kwds = parse_parenthesizable_whitespace( + config, + &mut self.rpar_tok.whitespace_before.borrow_mut(), + )?; + + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct MatchKeywordElement<'a> { + pub key: Name<'a>, + pub pattern: MatchPattern<'a>, + pub comma: Option>, + + pub whitespace_before_equal: ParenthesizableWhitespace<'a>, + pub whitespace_after_equal: ParenthesizableWhitespace<'a>, + + pub(crate) equal_tok: TokenRef<'a>, +} + +impl<'a> MatchKeywordElement<'a> { + fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { + self.key.codegen(state); + self.whitespace_before_equal.codegen(state); + state.add_token("="); + self.whitespace_after_equal.codegen(state); + self.pattern.codegen(state); + self.comma.codegen(state); + if self.comma.is_none() && default_comma { + state.add_token(", "); + } + } + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.key = self.key.inflate(config)?; + self.whitespace_before_equal = parse_parenthesizable_whitespace( + config, + &mut self.equal_tok.whitespace_before.borrow_mut(), + )?; + self.whitespace_after_equal = parse_parenthesizable_whitespace( + config, + &mut self.equal_tok.whitespace_after.borrow_mut(), + )?; + self.pattern = self.pattern.inflate(config)?; + self.comma = if last_element { + self.comma.map(|c| c.inflate_before(config)).transpose() + } else { + self.comma.inflate(config) + }?; + Ok(self) + } +} + +impl<'a> WithComma<'a> for MatchKeywordElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { + Self { + comma: Some(comma), + ..self + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +pub struct MatchAs<'a> { + pub pattern: Option>, + pub name: Option>, + pub lpar: Vec>, + pub rpar: Vec>, + + pub whitespace_before_as: Option>, + pub whitespace_after_as: Option>, + + pub(crate) as_tok: Option>, +} + +impl<'a> Codegen<'a> for MatchAs<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + if let Some(pat) = &self.pattern { + pat.codegen(state); + self.whitespace_before_as.codegen(state); + state.add_token("as"); + self.whitespace_after_as.codegen(state); + } + if let Some(name) = &self.name { + name.codegen(state); + } else { + state.add_token("_"); + } + }) + } +} + +impl<'a> Inflate<'a> for MatchAs<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.pattern = self.pattern.inflate(config)?; + if let Some(as_tok) = self.as_tok.as_mut() { + self.whitespace_before_as = Some(parse_parenthesizable_whitespace( + config, + &mut as_tok.whitespace_before.borrow_mut(), + )?); + self.whitespace_after_as = Some(parse_parenthesizable_whitespace( + config, + &mut as_tok.whitespace_after.borrow_mut(), + )?); + } + self.name = self.name.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +pub struct 
MatchOrElement<'a> { + pub pattern: MatchPattern<'a>, + pub separator: Option>, +} + +impl<'a> MatchOrElement<'a> { + fn codegen(&self, state: &mut CodegenState<'a>, default_separator: bool) { + self.pattern.codegen(state); + self.separator.codegen(state); + if self.separator.is_none() && default_separator { + state.add_token(" | "); + } + } +} + +impl<'a> Inflate<'a> for MatchOrElement<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.pattern = self.pattern.inflate(config)?; + self.separator = self.separator.inflate(config)?; + Ok(self) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +pub struct MatchOr<'a> { + pub patterns: Vec>, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'a> Codegen<'a> for MatchOr<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + let len = self.patterns.len(); + for (idx, pat) in self.patterns.iter().enumerate() { + pat.codegen(state, idx + 1 < len) + } + }) + } +} + +impl<'a> Inflate<'a> for MatchOr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.patterns = self.patterns.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) + } +} diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index 7e30b359..b6ab115c 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -7,6 +7,7 @@ use crate::{ tokenizer::whitespace_parser::{Config, WhitespaceError}, Codegen, CodegenState, Comma, EmptyLine, LeftParen, RightParen, }; +use std::ops::Deref; pub trait WithComma<'a> { fn with_comma(self, comma: Comma<'a>) -> Self; @@ -32,6 +33,24 @@ pub trait ParenthesizedNode<'a> { fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self; } +impl<'a, T: ParenthesizedNode<'a>> ParenthesizedNode<'a> for Box { + fn lpar(&self) -> &Vec> { + self.deref().lpar() + } + fn rpar(&self) -> &Vec> { + self.deref().rpar() + } + fn parenthesize(&self, state: &mut CodegenState<'a>, f: F) + where + F: FnOnce(&mut CodegenState<'a>), + { + self.deref().parenthesize(state, f) + } + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + Self::new((*self).with_parens(left, right)) + } +} + pub trait WithLeadingLines<'a> { fn leading_lines(&mut self) -> &mut Vec>; } diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 1bf7ca36..c881be57 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -16,6 +16,7 @@ use TokType::{ }; pub type Result<'a, T> = std::result::Result>; +type GrammarResult = std::result::Result; #[derive(Debug)] pub struct TokVec<'a>(Vec>>); @@ -150,6 +151,7 @@ parser! { / &lit("try") t:try_stmt() { CompoundStatement::Try(t) } / &lit("try") t:try_star_stmt() { CompoundStatement::TryStar(t) } / &lit("while") w:while_stmt() { CompoundStatement::While(w) } + / m:match_stmt() { CompoundStatement::Match(m) } // Simple statements @@ -529,6 +531,218 @@ parser! { } + // Match statement + + rule match_stmt() -> Match<'a> + = kw:lit("match") subject:subject_expr() col:lit(":") tok(NL, "NEWLINE") + i:tok(Indent, "INDENT") cases:case_block()+ d:tok(Dedent, "DEDENT") { + make_match(kw, subject, col, i, cases, d) + } + + rule subject_expr() -> Expression<'a> + = first:star_named_expression() c:comma() rest:star_named_expressions()? 
{ + Expression::Tuple( + make_tuple_from_elements(first.with_comma(c), rest.unwrap_or_default()) + ) + } + / named_expression() + + rule case_block() -> MatchCase<'a> + = kw:lit("case") pattern:patterns() guard:guard()? col:lit(":") body:block() { + make_case(kw, pattern, guard, col, body) + } + + rule guard() -> (TokenRef<'a>, Expression<'a>) + = kw:lit("if") exp:named_expression() { (kw, exp) } + + rule patterns() -> MatchPattern<'a> + = pats:open_sequence_pattern() { + MatchPattern::Sequence(make_list_pattern(None, pats, None)) + } + / pattern() + + rule pattern() -> MatchPattern<'a> + = as_pattern() + / or_pattern() + + rule as_pattern() -> MatchPattern<'a> + = pat:or_pattern() kw:lit("as") target:pattern_capture_target() { + make_as_pattern(Some(pat), Some(kw), Some(target)) + } + + rule or_pattern() -> MatchPattern<'a> + = pats:separated(, ) { + make_or_pattern(pats.0, pats.1) + } + + rule closed_pattern() -> MatchPattern<'a> + = literal_pattern() + / capture_pattern() + / wildcard_pattern() + / value_pattern() + / group_pattern() + / sequence_pattern() + / mapping_pattern() + / class_pattern() + + rule literal_pattern() -> MatchPattern<'a> + = val:signed_number() !(lit("+") / lit("-")) { make_match_value(val) } + / val:complex_number() { make_match_value(val) } + / val:strings() { make_match_value(val.into()) } + / n:lit("None") { make_match_singleton(make_name(n)) } + / n:lit("True") { make_match_singleton(make_name(n)) } + / n:lit("False") { make_match_singleton(make_name(n)) } + + rule literal_expr() -> Expression<'a> + = val:signed_number() !(lit("+") / lit("-")) { val } + / val:complex_number() { val } + / val:strings() { val.into() } + / n:lit("None") { Expression::Name(make_name(n)) } + / n:lit("True") { Expression::Name(make_name(n)) } + / n:lit("False") { Expression::Name(make_name(n)) } + + rule complex_number() -> Expression<'a> + = re:signed_real_number() op:(lit("+")/lit("-")) im:imaginary_number() {? + make_binary_op(re, op, im).map_err(|_| "complex number") + } + + rule signed_number() -> Expression<'a> + = n:tok(Number, "number") { make_number(n) } + / op:lit("-") n:tok(Number, "number") {? + make_unary_op(op, make_number(n)).map_err(|_| "signed number") + } + + rule signed_real_number() -> Expression<'a> + = real_number() + / op:lit("-") n:real_number() {? + make_unary_op(op, n).map_err(|_| "signed real number") + } + + rule real_number() -> Expression<'a> + = n:tok(Number, "number") {? ensure_real_number(n) } + + rule imaginary_number() -> Expression<'a> + = n:tok(Number, "number") {? ensure_imaginary_number(n) } + + rule capture_pattern() -> MatchPattern<'a> + = t:pattern_capture_target() { make_as_pattern(None, None, Some(t)) } + + rule pattern_capture_target() -> Name<'a> + = !lit("_") n:name() !(lit(".") / lit("(") / lit("=")) { n } + + rule wildcard_pattern() -> MatchPattern<'a> + = lit("_") { make_as_pattern(None, None, None) } + + rule value_pattern() -> MatchPattern<'a> + = v:attr() !(lit(".") / lit("(") / lit("=")) { + make_match_value(v.into()) + } + + // In upstream attr and name_or_attr are mutually recursive, but rust-peg + // doesn't support this yet. 
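The pattern rules above have to tell apart look-alike forms: a dotted name is a value pattern compared by equality, a bare name is a capture, `_` is the wildcard, and `|` joins alternatives into an or-pattern. A small plain-Python sketch of those distinctions (the `Color` enum is illustrative only):

```
from enum import Enum


class Color(Enum):
    RED = 1
    GREEN = 2


def describe(subject):
    match subject:
        # value_pattern: a dotted name is looked up and compared by equality.
        case Color.RED:
            return "red"
        # literal_pattern / or_pattern: alternatives joined with "|", including
        # signed numbers and the None/True/False singletons.
        case -1 | 0 | None:
            return "empty-ish"
        # as_pattern: keep the matched value under a new name.
        case Color() as c:
            return f"some colour: {c.name}"
        # wildcard_pattern: "_" matches anything and binds nothing.
        case _:
            return "unknown"


assert describe(Color.RED) == "red"
assert describe(None) == "empty-ish"
assert describe(Color.GREEN) == "some colour: GREEN"
assert describe("x") == "unknown"
```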
+ rule attr() -> NameOrAttribute<'a> + = &(name() lit(".")) v:name_or_attr() { v } + + #[cache_left_rec] + rule name_or_attr() -> NameOrAttribute<'a> + = val:name_or_attr() d:lit(".") attr:name() { + NameOrAttribute::A(make_attribute(val.into(), d, attr)) + } + / n:name() { NameOrAttribute::N(n) } + + rule group_pattern() -> MatchPattern<'a> + = l:lpar() pat:pattern() r:rpar() { pat.with_parens(l, r) } + + rule sequence_pattern() -> MatchPattern<'a> + = l:lbrak() pats:maybe_sequence_pattern()? r:rbrak() { + MatchPattern::Sequence( + make_list_pattern(Some(l), pats.unwrap_or_default(), Some(r)) + ) + } + / l:lpar() pats:open_sequence_pattern()? r:rpar() { + MatchPattern::Sequence(make_tuple_pattern(l, pats.unwrap_or_default(), r)) + } + + rule open_sequence_pattern() -> Vec> + = pat:maybe_star_pattern() c:comma() pats:maybe_sequence_pattern()? { + make_open_sequence_pattern(pat, c, pats.unwrap_or_default()) + } + + rule maybe_sequence_pattern() -> Vec> + = pats:separated_trailer(, ) { + comma_separate(pats.0, pats.1, pats.2) + } + + rule maybe_star_pattern() -> StarrableMatchSequenceElement<'a> + = s:star_pattern() { StarrableMatchSequenceElement::Starred(s) } + / p:pattern() { + StarrableMatchSequenceElement::Simple( + make_match_sequence_element(p) + ) + } + + rule star_pattern() -> MatchStar<'a> + = star:lit("*") t:pattern_capture_target() {make_match_star(star, Some(t))} + / star:lit("*") t:wildcard_pattern() { make_match_star(star, None) } + + rule mapping_pattern() -> MatchPattern<'a> + = l:lbrace() r:rbrace() { + make_match_mapping(l, vec![], None, None, None, None, r) + } + / l:lbrace() rest:double_star_pattern() trail:comma()? r:rbrace() { + make_match_mapping(l, vec![], None, Some(rest.0), Some(rest.1), trail, r) + } + / l:lbrace() items:items_pattern() c:comma() rest:double_star_pattern() + trail:comma()? r:rbrace() { + make_match_mapping(l, items, Some(c), Some(rest.0), Some(rest.1), trail, r) + } + / l:lbrace() items:items_pattern() trail:comma()? r:rbrace() { + make_match_mapping(l, items, trail, None, None, None, r) + } + + rule items_pattern() -> Vec> + = pats:separated(, ) { + comma_separate(pats.0, pats.1, None) + } + + rule key_value_pattern() -> MatchMappingElement<'a> + = key:(literal_expr() / a:attr() {a.into()}) colon:lit(":") pat:pattern() { + make_match_mapping_element(key, colon, pat) + } + + rule double_star_pattern() -> (TokenRef<'a>, Name<'a>) + = star:lit("**") n:pattern_capture_target() { (star, n) } + + rule class_pattern() -> MatchPattern<'a> + = cls:name_or_attr() l:lit("(") r:lit(")") { + make_class_pattern(cls, l, vec![], None, vec![], None, r) + } + / cls:name_or_attr() l:lit("(") pats:positional_patterns() c:comma()? r:lit(")") { + make_class_pattern(cls, l, pats, c, vec![], None, r) + } + / cls:name_or_attr() l:lit("(") kwds:keyword_patterns() c:comma()? r:lit(")") { + make_class_pattern(cls, l, vec![], None, kwds, c, r) + } + / cls:name_or_attr() l:lit("(") pats:positional_patterns() c:comma() + kwds:keyword_patterns() trail:comma()? 
r:lit(")") { + make_class_pattern(cls, l, pats, Some(c), kwds, trail, r) + } + + rule positional_patterns() -> Vec> + = pats:separated(, ) { + comma_separate(pats.0, pats.1, None) + } + + rule keyword_patterns() -> Vec> + = pats:separated(, ) { + comma_separate(pats.0, pats.1, None) + } + + rule keyword_pattern() -> MatchKeywordElement<'a> + = arg:name() eq:lit("=") value:pattern() { + make_match_keyword_element(arg, eq, value) + } + // Expressions #[cache] @@ -1977,6 +2191,15 @@ fn make_tuple<'a>( } } +fn make_tuple_from_elements<'a>(first: Element<'a>, mut rest: Vec>) -> Tuple<'a> { + rest.insert(0, first); + Tuple { + elements: rest, + lpar: Default::default(), + rpar: Default::default(), + } +} + fn make_kwarg<'a>(name: Name<'a>, eq: TokenRef<'a>, value: Expression<'a>) -> Arg<'a> { let equal = Some(make_assign_equal(eq)); let keyword = Some(name); @@ -3047,3 +3270,272 @@ fn make_named_expr<'a>(name: Name<'a>, tok: TokenRef<'a>, expr: Expression<'a>) walrus_tok: tok, } } + +fn make_match<'a>( + match_tok: TokenRef<'a>, + subject: Expression<'a>, + colon_tok: TokenRef<'a>, + indent_tok: TokenRef<'a>, + cases: Vec>, + dedent_tok: TokenRef<'a>, +) -> Match<'a> { + Match { + subject, + cases, + leading_lines: Default::default(), + whitespace_after_match: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), + indent: Default::default(), + footer: Default::default(), + match_tok, + colon_tok, + indent_tok, + dedent_tok, + } +} + +fn make_case<'a>( + case_tok: TokenRef<'a>, + pattern: MatchPattern<'a>, + guard: Option<(TokenRef<'a>, Expression<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> MatchCase<'a> { + let (if_tok, guard) = match guard { + Some((if_tok, guard)) => (Some(if_tok), Some(guard)), + None => (None, None), + }; + MatchCase { + pattern, + guard, + body, + leading_lines: Default::default(), + whitespace_after_case: Default::default(), + whitespace_before_if: Default::default(), + whitespace_after_if: Default::default(), + whitespace_before_colon: Default::default(), + case_tok, + if_tok, + colon_tok, + } +} + +fn make_match_value(value: Expression) -> MatchPattern { + MatchPattern::Value(MatchValue { value }) +} + +fn make_match_singleton(value: Name) -> MatchPattern { + MatchPattern::Singleton(MatchSingleton { value }) +} + +fn make_list_pattern<'a>( + lbracket: Option>, + patterns: Vec>, + rbracket: Option>, +) -> MatchSequence<'a> { + MatchSequence::MatchList(MatchList { + patterns, + lbracket, + rbracket, + lpar: Default::default(), + rpar: Default::default(), + }) +} + +fn make_as_pattern<'a>( + pattern: Option>, + as_tok: Option>, + name: Option>, +) -> MatchPattern<'a> { + MatchPattern::As(Box::new(MatchAs { + pattern, + name, + lpar: Default::default(), + rpar: Default::default(), + whitespace_before_as: Default::default(), + whitespace_after_as: Default::default(), + as_tok, + })) +} + +fn make_bit_or(tok: TokenRef) -> BitOr { + BitOr { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } +} + +fn make_or_pattern<'a>( + first: MatchPattern<'a>, + rest: Vec<(TokenRef<'a>, MatchPattern<'a>)>, +) -> MatchPattern<'a> { + if rest.is_empty() { + return first; + } + + let mut patterns = vec![]; + let mut current = first; + for (sep, next) in rest { + let op = make_bit_or(sep); + patterns.push(MatchOrElement { + pattern: current, + separator: Some(op), + }); + current = next; + } + patterns.push(MatchOrElement { + pattern: current, + separator: None, + }); + 
MatchPattern::Or(Box::new(MatchOr { + patterns, + lpar: Default::default(), + rpar: Default::default(), + })) +} + +fn ensure_real_number(tok: TokenRef) -> GrammarResult { + match make_number(tok) { + e @ (Expression::Integer(_) | Expression::Float(_)) => Ok(e), + _ => Err("real number"), + } +} + +fn ensure_imaginary_number(tok: TokenRef) -> GrammarResult { + match make_number(tok) { + e @ Expression::Imaginary(_) => Ok(e), + _ => Err("imaginary number"), + } +} + +fn make_tuple_pattern<'a>( + lpar: LeftParen<'a>, + patterns: Vec>, + rpar: RightParen<'a>, +) -> MatchSequence<'a> { + MatchSequence::MatchTuple(MatchTuple { + patterns, + lpar: vec![lpar], + rpar: vec![rpar], + }) +} + +fn make_open_sequence_pattern<'a>( + first: StarrableMatchSequenceElement<'a>, + comma: Comma<'a>, + mut rest: Vec>, +) -> Vec> { + rest.insert(0, first.with_comma(comma)); + rest +} + +fn make_match_sequence_element(value: MatchPattern) -> MatchSequenceElement { + MatchSequenceElement { + value, + comma: Default::default(), + } +} + +fn make_match_star<'a>(star_tok: TokenRef<'a>, name: Option>) -> MatchStar<'a> { + MatchStar { + name, + comma: Default::default(), + whitespace_before_name: Default::default(), + star_tok, + } +} + +fn make_match_mapping<'a>( + lbrace: LeftCurlyBrace<'a>, + mut elements: Vec>, + el_comma: Option>, + star_tok: Option>, + rest: Option>, + trailing_comma: Option>, + rbrace: RightCurlyBrace<'a>, +) -> MatchPattern<'a> { + if let Some(c) = el_comma { + if let Some(el) = elements.pop() { + elements.push(el.with_comma(c)); + } + // TODO: else raise error + } + MatchPattern::Mapping(MatchMapping { + elements, + rest, + trailing_comma, + lbrace, + rbrace, + lpar: Default::default(), + rpar: Default::default(), + whitespace_before_rest: Default::default(), + star_tok, + }) +} + +fn make_match_mapping_element<'a>( + key: Expression<'a>, + colon_tok: TokenRef<'a>, + pattern: MatchPattern<'a>, +) -> MatchMappingElement<'a> { + MatchMappingElement { + key, + pattern, + comma: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), + colon_tok, + } +} + +fn make_class_pattern<'a>( + cls: NameOrAttribute<'a>, + lpar_tok: TokenRef<'a>, + mut patterns: Vec>, + pat_comma: Option>, + mut kwds: Vec>, + kwd_comma: Option>, + rpar_tok: TokenRef<'a>, +) -> MatchPattern<'a> { + if let Some(c) = pat_comma { + if let Some(el) = patterns.pop() { + patterns.push(el.with_comma(c)); + } + // TODO: else raise error + } + if let Some(c) = kwd_comma { + if let Some(el) = kwds.pop() { + kwds.push(el.with_comma(c)); + } + // TODO: else raise error + } + MatchPattern::Class(MatchClass { + cls, + patterns, + kwds, + lpar: Default::default(), + rpar: Default::default(), + whitespace_after_cls: Default::default(), + whitespace_before_patterns: Default::default(), + whitespace_after_kwds: Default::default(), + lpar_tok, + rpar_tok, + }) +} + +fn make_match_keyword_element<'a>( + key: Name<'a>, + equal_tok: TokenRef<'a>, + pattern: MatchPattern<'a>, +) -> MatchKeywordElement<'a> { + MatchKeywordElement { + key, + pattern, + comma: Default::default(), + whitespace_before_equal: Default::default(), + whitespace_after_equal: Default::default(), + equal_tok, + } +} diff --git a/native/libcst/tests/fixtures/malicious_match.py b/native/libcst/tests/fixtures/malicious_match.py new file mode 100644 index 00000000..cabf4689 --- /dev/null +++ b/native/libcst/tests/fixtures/malicious_match.py @@ -0,0 +1,39 @@ + +# foo + +match ( foo ) : #comment + +# more comments + case 
False : # comment + + ... + case ( True ) : ... + case _ : ... + case ( _ ) : ... # foo + +# bar + +match x: + case "StringMatchValue" : pass + case [1, 2] : pass + case [ 1 , * foo , * _ , ]: pass + case [ [ _, ] , *_ ]: pass + case {1: _, 2: _}: pass + case { "foo" : bar , ** rest } : pass + case { 1 : {**rest} , } : pass + case Point2D(): pass + case Cls ( 0 , ) : pass + case Cls ( x=0, y = 2) :pass + case Cls ( 0 , 1 , x = 0 , y = 2 ) : pass + case [x] as y: pass + case [x] as y : pass + case (True)as x:pass + case Foo:pass + case (Foo):pass + case ( Foo ) : pass + case [ ( Foo ) , ]: pass + case Foo|Bar|Baz : pass + case Foo | Bar | ( Baz): pass + case x,y , * more :pass + case y.z: pass + From b5fb9d79c53147ace5404b98b4af340d33f9626a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 4 Jan 2022 17:01:09 +0000 Subject: [PATCH 169/632] Fix pypi upload CI step (#570) * pass in LIBCST_NO_LOCAL_SCHEME to CIBW * only build a source tarball on one platform * factor out upload into a separate job --- .github/workflows/build.yml | 58 +++++++++++++++++++++++++++---------- 1 file changed, 43 insertions(+), 15 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 31914cc6..aeb74bb2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -198,7 +198,7 @@ jobs: CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y" CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" - CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin"' + CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" CIBW_ARCHS: ${{ matrix.vers }} CIBW_BUILD_VERBOSITY: 1 @@ -224,20 +224,48 @@ jobs: - uses: actions/upload-artifact@v2 with: path: wheelhouse/*.whl - - name: Build a source tarball - run: >- - python -m - build - --sdist - --outdir wheelhouse/ - - if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository_url: https://test.pypi.org/legacy/ - packages_dir: wheelhouse/ + name: wheels + + pypi: + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + name: Upload wheels to pypi + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v1 + - name: Download binary wheels + id: download + uses: actions/download-artifact@v2 + with: + name: wheels + path: wheelhouse + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Validate Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: exit 1 + - name: Disable scmtools local scheme + run: >- + echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Build a source tarball + run: >- + python -m + build + --sdist + --outdir ${{ steps.download.outputs.download-path }} + - name: Publish distribution 📦 to Test PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + packages_dir: 
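The malicious_match.py fixture added above exists to check that deliberately odd (but legal) spacing and comments survive a parse/print round trip unchanged. A minimal sketch of that property, assuming the native parser is selected (for example via the LIBCST_PARSER_TYPE environment variable):

```
import libcst as cst

source = (
    "match ( foo ) :  # comment\n"
    "    case { 1 : _ , ** rest } : pass\n"
    "    case Cls ( 0 , 1 , x = 0 , y = 2 ) : pass\n"
)

# Round-tripping must preserve every space and comment exactly.
module = cst.parse_module(source)
assert module.code == source
```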
${{ steps.download.outputs.download-path }} # Test rust parts native: From cff47b767bbbb7c0f8cfc7670173fe1a39703daa Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 4 Jan 2022 20:03:59 +0000 Subject: [PATCH 170/632] fix python version in pypi upload step (#576) --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index aeb74bb2..b2cecde9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -241,7 +241,7 @@ jobs: path: wheelhouse - uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: "3.10" - uses: actions/cache@v2 id: cache with: From 86431eea89b54f3d3749034cc2cfded3dbe55240 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 4 Jan 2022 20:04:21 +0000 Subject: [PATCH 171/632] Make sure dedents are emitted for inputs without trailing newlines (#573) --- native/libcst/src/tokenizer/core/mod.rs | 15 ++++++- native/libcst/src/tokenizer/tests.rs | 40 +++++++++++++++++++ .../libcst/tests/fixtures/dangling_indent.py | 3 ++ 3 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 native/libcst/tests/fixtures/dangling_indent.py diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 5e8c29a1..3ff97e7d 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -342,7 +342,20 @@ impl<'t> TokState<'t> { self.missing_nl_before_eof = false; Ok(TokType::Newline) } else { - Ok(TokType::EndMarker) + let hanging_indents = self.indent_stack.len() as i32; + if self.pending_indents == 0 && hanging_indents != 0 { + // We've reached EOF but there are still pending indents not + // accounted for. Flush them out. 
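The tokenizer change described above flushes any outstanding indentation levels as DEDENT tokens when the input ends without a trailing newline, which is what lets sources like the new dangling_indent.py fixture parse. A quick way to observe the effect from Python, again assuming the native parser is enabled:

```
import libcst as cst

# No trailing newline, and the file ends while still indented.
source = "if 1:\n    if 2:\n        pass"

module = cst.parse_module(source)
assert module.code == source  # round-trips, including the missing final newline
```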
+ self.pending_indents = -hanging_indents; + self.indent_stack.clear(); + self.alt_indent_stack.clear(); + self.missing_nl_before_eof = false; + } + if let Some(t) = self.process_pending_indents() { + Ok(t) + } else { + Ok(TokType::EndMarker) + } } } diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 6b4dfbb1..db437857 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -687,3 +687,43 @@ fn test_add_dedents_after_fake_newline() { ]) ); } + +#[test] +fn test_add_dedents_for_dangling_indent() { + assert_eq!( + tokenize_with_end_marker("if 1:\n if 2:\n ", &default_config()), + Ok(vec![ + (TokType::Name, "if"), + (TokType::Number, "1"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "if"), + (TokType::Number, "2"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Dedent, ""), + (TokType::EndMarker, "") + ]) + ); +} + +#[test] +fn test_add_dedents_for_dangling_indent_with_comment() { + assert_eq!( + tokenize_with_end_marker("if 1:\n if 2:\n # foo", &default_config()), + Ok(vec![ + (TokType::Name, "if"), + (TokType::Number, "1"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "if"), + (TokType::Number, "2"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Dedent, ""), + (TokType::EndMarker, "") + ]) + ); +} diff --git a/native/libcst/tests/fixtures/dangling_indent.py b/native/libcst/tests/fixtures/dangling_indent.py new file mode 100644 index 00000000..32ac6230 --- /dev/null +++ b/native/libcst/tests/fixtures/dangling_indent.py @@ -0,0 +1,3 @@ +if 1: + pass + \ No newline at end of file From b939bf2998111fbdbecb086c4622e82321dc9a26 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 5 Jan 2022 15:43:43 +0000 Subject: [PATCH 172/632] run tests with 3.10 too (#577) --- .github/workflows/build.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b2cecde9..de812dc6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,7 +10,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] steps: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 @@ -34,7 +34,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] parser: [pure, native] steps: - uses: actions/checkout@v1 @@ -66,7 +66,7 @@ jobs: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - uses: actions/cache@v2 id: cache with: @@ -87,7 +87,7 @@ jobs: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - uses: actions/cache@v2 id: cache with: @@ -122,7 +122,7 @@ jobs: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - uses: actions/cache@v2 id: cache with: @@ -154,7 +154,7 @@ jobs: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - uses: actions/cache@v2 id: cache with: @@ -206,7 +206,7 @@ jobs: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - uses: actions/cache@v2 id: 
cache with: @@ -283,7 +283,7 @@ jobs: components: rustfmt, clippy - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: test uses: actions-rs/cargo@v1 with: From 73ecdf45c3ae2a9c5af567f64370d8913917f378 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 5 Jan 2022 17:09:30 +0000 Subject: [PATCH 173/632] only run CI on pushes to main and pull requests (#581) --- .github/workflows/build.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index de812dc6..2bb89f75 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,10 @@ name: Python CI -on: [push, pull_request] +on: + push: + branches: + - main + pull_request: jobs: # Install and cache dependencies From d9a1dc84739495090ecedba59086a98fcf03689c Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 5 Jan 2022 18:13:01 +0000 Subject: [PATCH 174/632] Fix all type errors (#579) * bump pyre version * make sure CI-pyre uses working copy * remove unused pyre suppressions * suppress invalid decorations * fix undefined attributes * fix missing return annotations * fix tuple concatenation issues * add native stubs * fix invalid typing of **kwargs in test_apply_type_annotations * only install pyre on non-windows * update test fixture to reflect changes in recent pyre versions * suppress errors related to mismatched positions --- .../workflows/.pyre_configuration.template | 1 + .github/workflows/build.yml | 4 +- .pyre_configuration.example | 3 +- libcst/_add_slots.py | 4 ++ libcst/_nodes/base.py | 2 +- libcst/_nodes/expression.py | 17 +++--- libcst/_nodes/statement.py | 7 ++- libcst/_nodes/tests/test_assign.py | 2 +- libcst/_nodes/tests/test_match.py | 7 ++- libcst/_nodes/tests/test_try.py | 6 +- libcst/_parser/base_parser.py | 1 - libcst/_parser/conversions/expression.py | 13 ++-- libcst/_parser/conversions/params.py | 12 +--- libcst/_parser/conversions/statement.py | 4 ++ libcst/_parser/parso/python/tokenize.py | 4 ++ libcst/_parser/production_decorator.py | 4 ++ libcst/_parser/types/config.py | 1 - libcst/_parser/whitespace_parser.py | 4 -- libcst/_position.py | 4 +- libcst/_type_enforce.py | 12 +++- libcst/_typed_visitor_base.py | 1 + libcst/codemod/_cli.py | 12 ++-- libcst/codemod/_command.py | 4 +- libcst/codemod/_testing.py | 1 + .../commands/convert_format_to_fstring.py | 6 +- .../convert_percent_format_to_fstring.py | 19 ++++-- .../codemod/commands/ensure_import_present.py | 8 +-- libcst/codemod/commands/rename.py | 6 +- libcst/codemod/visitors/_add_imports.py | 22 +++---- .../visitors/_gather_unused_imports.py | 4 ++ .../tests/test_apply_type_annotations.py | 15 +++-- libcst/helpers/module.py | 2 + libcst/matchers/_decorators.py | 4 ++ libcst/matchers/_matcher_base.py | 59 ++++++++++++++----- libcst/matchers/_visitors.py | 13 +++- libcst/metadata/scope_provider.py | 3 +- .../metadata/tests/test_metadata_wrapper.py | 3 - libcst/metadata/tests/test_name_provider.py | 5 -- libcst/metadata/tests/test_scope_provider.py | 18 +++++- .../tests/test_type_inference_provider.py | 9 +-- libcst/metadata/type_inference_provider.py | 2 + libcst/metadata/wrapper.py | 1 - libcst/testing/utils.py | 2 +- libcst/tests/pyre/.pyre_configuration | 3 +- libcst/tests/pyre/simple_class.json | 4 +- libcst/tests/test_pyre_integration.py | 11 ++-- pyproject.toml | 2 +- requirements-dev.txt | 2 +- stubs/libcst/native.pyi | 11 ++++ stubs/tokenize.pyi | 1 - 50 files changed, 230 insertions(+), 135 deletions(-) 
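A side note on the workflow edits above: the Python version has to be written as the string "3.10" in the matrix and setup steps, because a bare YAML scalar 3.10 is parsed as the float 3.1. A quick illustration (assumes PyYAML is available; it is not a LibCST dependency):

```
import yaml

print(yaml.safe_load("python-version: 3.10"))    # {'python-version': 3.1}
print(yaml.safe_load('python-version: "3.10"'))  # {'python-version': '3.10'}
```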
create mode 100644 stubs/libcst/native.pyi diff --git a/.github/workflows/.pyre_configuration.template b/.github/workflows/.pyre_configuration.template index 4b5b2007..b2ed4b8d 100644 --- a/.github/workflows/.pyre_configuration.template +++ b/.github/workflows/.pyre_configuration.template @@ -8,5 +8,6 @@ "search_path": [ "stubs", "{{ .python_site_packages }}" ], + "workers": 3, "strict": true } diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2bb89f75..9401bf1d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -113,8 +113,10 @@ jobs: python_site_packages: ${{ steps.python-info.outputs.SITE_PACKAGES }} - name: Write Pyre Config run: echo '${{ steps.pyre_template.outputs.result }}' > .pyre_configuration + - name: Make sure Pyre uses the working copy + run: pip install -e . - run: pyre --version - - run: pyre check + - run: pyre -n check - run: python libcst/tests/test_pyre_integration.py - run: git diff --exit-code diff --git a/.pyre_configuration.example b/.pyre_configuration.example index 784a7958..0223ce37 100644 --- a/.pyre_configuration.example +++ b/.pyre_configuration.example @@ -6,7 +6,8 @@ "stubs" ], "exclude": [ - ".*/\\.tox/.*" + ".*/\\.tox/.*", + ".*/native/.*" ], "strict": true } diff --git a/libcst/_add_slots.py b/libcst/_add_slots.py index a62b2f2d..6e9c0041 100644 --- a/libcst/_add_slots.py +++ b/libcst/_add_slots.py @@ -34,9 +34,13 @@ def add_slots(cls: Type[_T]) -> Type[_T]: # by the removal of GenericMeta. We should just be able to use cls.__bases__ in the # future. bases = getattr(cls, "__orig_bases__", cls.__bases__) + # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. + # pyre-fixme[19]: Expected 0 positional arguments. cls = type(cls)(cls.__name__, bases, cls_dict) except TypeError: # We're in py3.7 and should use cls.__bases__ + # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. + # pyre-fixme[19]: Expected 0 positional arguments. cls = type(cls)(cls.__name__, cls.__bases__, cls_dict) if qualname is not None: cls.__qualname__ = qualname diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index fd86cad4..9173414b 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -423,7 +423,7 @@ class CSTNode(ABC): raise Exception("Logic error, cannot get a *Sentinel here!") return new_tree - def __eq__(self: _CSTNodeSelfT, other: _CSTNodeSelfT) -> bool: + def __eq__(self: _CSTNodeSelfT, other: object) -> bool: """ CSTNodes are only treated as equal by identity. This matches the behavior of CPython's AST nodes. diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 2dc7c882..dba5faf3 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -1844,7 +1844,6 @@ class Parameters(CSTNode): if len(vals) == 0: return for val in vals: - # pyre-ignore Pyre seems to think val.star.__eq__ is not callable if isinstance(val.star, str) and val.star != "": raise CSTValidationError( f"Expecting a star prefix of '' for {section} Param." @@ -1864,6 +1863,8 @@ class Parameters(CSTNode): def _validate_defaults(self) -> None: seen_default = False + # pyre-fixme[60]: Concatenation not yet support for multiple variadic + # tuples: `*self.posonly_params, *self.params`. 
for param in (*self.posonly_params, *self.params): if param.default: # Mark that we've moved onto defaults @@ -1891,7 +1892,6 @@ class Parameters(CSTNode): if ( isinstance(star_arg, Param) and isinstance(star_arg.star, str) - # pyre-ignore Pyre seems to think star_kwarg.star.__eq__ is not callable and star_arg.star != "*" ): raise CSTValidationError( @@ -1903,7 +1903,6 @@ class Parameters(CSTNode): if ( star_kwarg is not None and isinstance(star_kwarg.star, str) - # pyre-ignore Pyre seems to think star_kwarg.star.__eq__ is not callable and star_kwarg.star != "**" ): raise CSTValidationError( @@ -2194,9 +2193,7 @@ class _BaseExpressionWithArgs(BaseExpression, ABC): #: Sequence of arguments that will be passed to the function call. args: Sequence[Arg] = () - def _check_kwargs_or_keywords( - self, arg: Arg - ) -> Optional[Callable[[Arg], Callable]]: + def _check_kwargs_or_keywords(self, arg: Arg) -> None: """ Validates that we only have a mix of "keyword=arg" and "**arg" expansion. """ @@ -2220,7 +2217,7 @@ class _BaseExpressionWithArgs(BaseExpression, ABC): def _check_starred_or_keywords( self, arg: Arg - ) -> Optional[Callable[[Arg], Callable]]: + ) -> Optional[Callable[[Arg], Callable[[Arg], None]]]: """ Validates that we only have a mix of "*arg" expansion and "keyword=arg". """ @@ -2243,7 +2240,9 @@ class _BaseExpressionWithArgs(BaseExpression, ABC): "Cannot have positional argument after keyword argument." ) - def _check_positional(self, arg: Arg) -> Optional[Callable[[Arg], Callable]]: + def _check_positional( + self, arg: Arg + ) -> Optional[Callable[[Arg], Callable[[Arg], Callable[[Arg], None]]]]: """ Validates that we only have a mix of positional args and "*arg" expansion. """ @@ -2267,6 +2266,8 @@ class _BaseExpressionWithArgs(BaseExpression, ABC): # Valid, allowed to have positional arguments here return None + # pyre-fixme[30]: Pyre gave up inferring some types - function `_validate` was + # too complex. def _validate(self) -> None: # Validate any super-class stuff, whatever it may be. super()._validate() diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 7d9208df..13519498 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -2308,7 +2308,6 @@ class Raise(BaseSmallStatement): not isinstance(whitespace_before_from, MaybeSentinel) and whitespace_before_from.empty ) - # pyre-ignore Pyre thinks exc is Optional if has_no_gap and not exc._safe_to_use_with_word_operator( ExpressionPosition.LEFT ): @@ -2590,6 +2589,7 @@ class MatchPattern(_BaseParenthesizedNode, ABC): @add_slots @dataclass(frozen=True) +# pyre-fixme[13]: Attribute `body` is never initialized. class Match(BaseCompoundStatement): """ A ``match`` statement. @@ -2731,9 +2731,13 @@ class MatchCase(CSTNode): self, "whitespace_after_case", self.whitespace_after_case, visitor ), pattern=visit_required(self, "pattern", self.pattern, visitor), + # pyre-fixme[6]: Expected `SimpleWhitespace` for 4th param but got + # `Optional[SimpleWhitespace]`. whitespace_before_if=visit_optional( self, "whitespace_before_if", self.whitespace_before_if, visitor ), + # pyre-fixme[6]: Expected `SimpleWhitespace` for 5th param but got + # `Optional[SimpleWhitespace]`. whitespace_after_if=visit_optional( self, "whitespace_after_if", self.whitespace_after_if, visitor ), @@ -2817,6 +2821,7 @@ class MatchSingleton(MatchPattern): @lpar.setter def lpar(self, value: Sequence[LeftParen]) -> None: + # pyre-fixme[41]: Cannot reassign final attribute `lpar`. 
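Among the signature fixes in this patch, `CSTNode.__eq__` now takes `other: object` instead of the node's own type: `object.__eq__` accepts any object, so a narrower parameter is an invalid override under strict checking. A minimal sketch of the corrected shape (the `Node` class is illustrative, not LibCST's):

```
class Node:
    def __eq__(self, other: object) -> bool:
        # Identity-based equality, mirroring CSTNode's behaviour; accepting
        # `object` keeps the override compatible with object.__eq__.
        return self is other


n = Node()
assert n == n
assert n != Node()
```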
self.value.lpar = value diff --git a/libcst/_nodes/tests/test_assign.py b/libcst/_nodes/tests/test_assign.py index b91533b3..7df2fdee 100644 --- a/libcst/_nodes/tests/test_assign.py +++ b/libcst/_nodes/tests/test_assign.py @@ -422,7 +422,7 @@ class AugAssignTest(CSTNodeTest): operator=cst.Add(), right=cst.Integer("1"), ), - operator=cst.Add(), + operator=cst.AddAssign(), value=cst.Name("y"), ) ), diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py index ffa7b4b7..edf51d84 100644 --- a/libcst/_nodes/tests/test_match.py +++ b/libcst/_nodes/tests/test_match.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any +from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement @@ -11,11 +11,12 @@ from libcst._nodes.tests.base import CSTNodeTest from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider -parser = parse_statement if is_native() else None +parser: Optional[Callable[[str], cst.CSTNode]] = ( + parse_statement if is_native() else None +) class MatchTest(CSTNodeTest): - # pyre-fixme[56]: Invalid decoration - Pyre was not able to infer the type @data_provider( ( # Values and singletons diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index f61cbde0..c04a9db2 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any +from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement @@ -12,7 +12,9 @@ from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider -native_parse_statement = parse_statement if is_native() else None +native_parse_statement: Optional[Callable[[str], cst.CSTNode]] = ( + parse_statement if is_native() else None +) class TryTest(CSTNodeTest): diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index 04b9bd11..ef9e1519 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -204,7 +204,6 @@ class BaseParser(Generic[_TokenT, _TokenTypeT, _NodeT]): ) # Logically, `plan` is always defined, but pyre can't reasonably determine that. - # pyre-fixme[18]: Global name `plan` is undefined. stack[-1].dfa = plan.next_dfa for push in plan.dfa_pushes: diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 53c6b0a9..a3ba90ac 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -616,6 +616,8 @@ def convert_atom_expr_trailer( config, trailer.whitespace_before ), lbracket=trailer.lbracket, + # pyre-fixme[6]: Expected `Sequence[SubscriptElement]` for 4th param + # but got `Union[typing.Sequence[SubscriptElement], Index, Slice]`. slice=trailer.slice, rbracket=trailer.rbracket, ) @@ -643,6 +645,8 @@ def convert_atom_expr_trailer( config, trailer.lpar.whitespace_before ), whitespace_before_args=trailer.lpar.value.whitespace_after, + # pyre-fixme[6]: Expected `Sequence[Arg]` for 4th param but got + # `Tuple[object, ...]`. 
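The test_assign.py fix above swaps `cst.Add()` for `cst.AddAssign()`: `AugAssign` takes the dedicated augmented-assignment operator nodes, while `Add` is the binary `+` operator used by `BinaryOperation`. A minimal construction sketch using the public LibCST API:

```
import libcst as cst

stmt = cst.SimpleStatementLine(
    body=[
        cst.AugAssign(
            target=cst.Name("y"),
            operator=cst.AddAssign(),  # "+="; cst.Add() is the binary "+" operator
            value=cst.Integer("1"),
        )
    ]
)
print(cst.Module(body=[stmt]).code)  # y += 1
```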
args=tuple(args), ) else: @@ -778,13 +782,10 @@ def convert_subscript( first_colon=Colon( whitespace_before=parse_parenthesizable_whitespace( config, - # pyre-fixme[16]: Optional type has no attribute - # `whitespace_before`. first_colon.whitespace_before, ), whitespace_after=parse_parenthesizable_whitespace( config, - # pyre-fixme[16]: Optional type has no attribute `whitespace_after`. first_colon.whitespace_after, ), ), @@ -948,7 +949,10 @@ def convert_atom_parens( inner_atom = atoms[0].value return WithLeadingWhitespace( inner_atom.with_changes( - lpar=(lpar, *inner_atom.lpar), rpar=(*inner_atom.rpar, rpar) + # pyre-fixme[60]: Expected to unpack an iterable, but got `unknown`. + lpar=(lpar, *inner_atom.lpar), + # pyre-fixme[60]: Expected to unpack an iterable, but got `unknown`. + rpar=(*inner_atom.rpar, rpar), ), lpar_tok.whitespace_before, ) @@ -1237,7 +1241,6 @@ def _convert_sequencelike( # lpar/rpar are the responsibility of our parent return WithLeadingWhitespace( - # pyre-ignore[29]: `Union[Type[List], Type[Set], Type[Tuple]]` is not a function. sequence_type(elements, lpar=(), rpar=()), children[0].whitespace_before, ) diff --git a/libcst/_parser/conversions/params.py b/libcst/_parser/conversions/params.py index aa51d681..9ac7f1d1 100644 --- a/libcst/_parser/conversions/params.py +++ b/libcst/_parser/conversions/params.py @@ -140,7 +140,6 @@ def convert_argslist( # noqa: C901 "Cannot have multiple slash ('/') markers in a single argument " + "list." ) - # pyre-ignore Pyre seems to think param.star.__eq__ is not callable elif isinstance(param.star, str) and param.star == "" and param.default is None: # Can only add this if we're in the params or kwonly_params section if current_param is params and not seen_default: @@ -156,7 +155,6 @@ def convert_argslist( # noqa: C901 ) elif ( isinstance(param.star, str) - # pyre-ignore Pyre seems to think param.star.__eq__ is not callable and param.star == "" and param.default is not None ): @@ -172,10 +170,7 @@ def convert_argslist( # noqa: C901 # This should be unreachable, the grammar already disallows it. raise Exception("Cannot have any arguments after a kwargs expansion.") elif ( - isinstance(param.star, str) - # pyre-ignore Pyre seems to think param.star.__eq__ is not callable - and param.star == "*" - and param.default is None + isinstance(param.star, str) and param.star == "*" and param.default is None ): # Can only add this if we're in params, since we only allow one of # "*" or "*param". @@ -191,10 +186,7 @@ def convert_argslist( # noqa: C901 + "argument expansion." ) elif ( - isinstance(param.star, str) - # pyre-ignore Pyre seems to think param.star.__eq__ is not callable - and param.star == "**" - and param.default is None + isinstance(param.star, str) and param.star == "**" and param.default is None ): # Can add this in all cases where we don't have a star_kwarg # yet. diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index 96e0be1a..608f002f 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -1300,6 +1300,10 @@ def convert_decorated(config: ParserConfig, children: Sequence[Any]) -> Any: # Now, modify the original function or class to add the decorators. return class_or_func.with_changes( leading_lines=leading_lines, + # pyre-fixme[60]: Concatenation not yet support for multiple variadic + # tuples: `*class_or_func.leading_lines, + # *class_or_func.lines_after_decorators`. 
+ # pyre-fixme[60]: Expected to unpack an iterable, but got `unknown`. lines_after_decorators=( *class_or_func.leading_lines, *class_or_func.lines_after_decorators, diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 380246f0..bfd159dd 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -580,6 +580,7 @@ def _tokenize_lines_py36_or_below( # noqa: C901 if not pseudomatch: # scan for tokens match = token_collection.whitespace.match(line, pos) if pos == 0: + # pyre-fixme[16]: `Optional` has no attribute `end`. yield from dedent_if_necessary(match.end()) pos = match.end() new_line = False @@ -587,6 +588,7 @@ def _tokenize_lines_py36_or_below( # noqa: C901 ERRORTOKEN, line[pos], (lnum, pos), + # pyre-fixme[16]: `Optional` has no attribute `group`. additional_prefix + match.group(0), ) additional_prefix = "" @@ -935,6 +937,7 @@ def _tokenize_lines_py37_or_above( # noqa: C901 if not pseudomatch: # scan for tokens match = token_collection.whitespace.match(line, pos) if pos == 0: + # pyre-fixme[16]: `Optional` has no attribute `end`. for t in dedent_if_necessary(match.end()): yield t pos = match.end() @@ -943,6 +946,7 @@ def _tokenize_lines_py37_or_above( # noqa: C901 ERRORTOKEN, line[pos], (lnum, pos), + # pyre-fixme[16]: `Optional` has no attribute `group`. additional_prefix + match.group(0), ) additional_prefix = "" diff --git a/libcst/_parser/production_decorator.py b/libcst/_parser/production_decorator.py index 3515e0b4..41a817f8 100644 --- a/libcst/_parser/production_decorator.py +++ b/libcst/_parser/production_decorator.py @@ -21,6 +21,10 @@ def with_production( *, version: Optional[str] = None, future: Optional[str] = None, + # pyre-fixme[34]: `Variable[_NonterminalConversionT (bound to + # typing.Callable[[libcst_native.parser_config.ParserConfig, + # typing.Sequence[typing.Any]], typing.Any])]` isn't present in the function's + # parameters. ) -> Callable[[_NonterminalConversionT], _NonterminalConversionT]: """ Attaches a bit of grammar to a conversion function. 
The parser extracts all of these diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index 1233de0c..1fc32371 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -23,7 +23,6 @@ try: except ImportError: from libcst._parser.types import py_config as config_mod - # pyre-fixme[9]: This is a small implementation difference between native and python MockWhitespaceParserConfig = config_mod.MockWhitespaceParserConfig BaseWhitespaceParserConfig = config_mod.BaseWhitespaceParserConfig diff --git a/libcst/_parser/whitespace_parser.py b/libcst/_parser/whitespace_parser.py index 1fe5a051..1c29efc5 100644 --- a/libcst/_parser/whitespace_parser.py +++ b/libcst/_parser/whitespace_parser.py @@ -23,11 +23,7 @@ try: except ImportError: from libcst._parser import py_whitespace_parser as mod -# pyre-fixme[5]: There's no sane way to type these re-exports parse_simple_whitespace = mod.parse_simple_whitespace -# pyre-fixme[5]: There's no sane way to type these re-exports parse_empty_lines = mod.parse_empty_lines -# pyre-fixme[5]: There's no sane way to type these re-exports parse_trailing_whitespace = mod.parse_trailing_whitespace -# pyre-fixme[5]: There's no sane way to type these re-exports parse_parenthesizable_whitespace = mod.parse_parenthesizable_whitespace diff --git a/libcst/_position.py b/libcst/_position.py index ebf3801e..d7ba0d07 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -31,6 +31,8 @@ class CodePosition: @add_slots @dataclass(frozen=True) +# pyre-fixme[13]: Attribute `end` is never initialized. +# pyre-fixme[13]: Attribute `start` is never initialized. class CodeRange: #: Starting position of a node (inclusive). start: CodePosition @@ -45,8 +47,6 @@ class CodeRange: def __init__(self, start: Tuple[int, int], end: Tuple[int, int]) -> None: ... - # pyre-ignore[13]: Attribute `end` is never initialized. - # pyre-ignore[13]: Attribute `start` is never initialized. def __init__(self, start: _CodePositionT, end: _CodePositionT) -> None: if isinstance(start, tuple) and isinstance(end, tuple): object.__setattr__(self, "start", CodePosition(start[0], start[1])) diff --git a/libcst/_type_enforce.py b/libcst/_type_enforce.py index 6629eb87..3eb72e77 100644 --- a/libcst/_type_enforce.py +++ b/libcst/_type_enforce.py @@ -11,11 +11,16 @@ from typing_inspect import get_args, get_origin, is_classvar, is_typevar, is_uni try: # py37+ from typing import ForwardRef except ImportError: # py36 + # pyre-fixme[21]: Could not find name `_ForwardRef` in `typing` (stubbed). from typing import _ForwardRef as ForwardRef def is_value_of_type( # noqa: C901 "too complex" - value: Any, expected_type: Any, invariant_check: bool = False + # pyre-fixme[2]: Parameter annotation cannot be `Any`. + value: Any, + # pyre-fixme[2]: Parameter annotation cannot be `Any`. + expected_type: Any, + invariant_check: bool = False, ) -> bool: """ This method attempts to verify a given value is of a given type. If the type is @@ -79,7 +84,7 @@ def is_value_of_type( # noqa: C901 "too complex" # We don't want to include Tuple subclasses, like NamedTuple, because they're # unlikely to behave similarly. 
elif expected_origin_type in [Tuple, tuple]: # py36 uses Tuple, py37+ uses tuple - if not isinstance(value, Tuple): + if not isinstance(value, tuple): return False type_args = get_args(expected_type, evaluate=True) @@ -133,7 +138,8 @@ def is_value_of_type( # noqa: C901 "too complex" # Similarly, tuple subclasses tend to have pretty different behavior, and we should # fall back to the default check. elif issubclass(expected_origin_type, Iterable) and not issubclass( - expected_origin_type, (str, bytes, Tuple) + expected_origin_type, + (str, bytes, tuple), ): # We know this thing is *some* kind of Iterable, but we want to # allow subclasses. That means we want [1,2,3] to match both diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index 61b3c2d5..3b1bd2db 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -8,6 +8,7 @@ from typing import Any, Callable, cast, TYPE_CHECKING, TypeVar if TYPE_CHECKING: from libcst._typed_visitor import CSTTypedBaseFunctions # noqa: F401 +# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters. F = TypeVar("F", bound=Callable) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 5a25a794..a7b18783 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -160,19 +160,19 @@ def exec_transform_with_prettyprint( return code result = transform_module(transform, code, python_version=python_version) - code: Optional[str] = ( + maybe_code: Optional[str] = ( None if isinstance(result, (TransformFailure, TransformExit, TransformSkip)) else result.code ) - if code is not None and format_code: + if maybe_code is not None and format_code: try: - code = invoke_formatter(formatter_args, code) + maybe_code = invoke_formatter(formatter_args, maybe_code) except Exception as ex: # Failed to format code, treat as a failure and make sure that # we print the exception for debugging. - code = None + maybe_code = None result = TransformFailure( error=ex, traceback_str=traceback.format_exc(), @@ -181,7 +181,7 @@ def exec_transform_with_prettyprint( # Finally, print the output, regardless of what happened print_execution_result(result) - return code + return maybe_code def _calculate_module(repo_root: Optional[str], filename: str) -> Optional[str]: @@ -572,7 +572,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 chunksize = 4 # Grab number of cores if we need to - jobs: int = min( + jobs = min( jobs if jobs is not None else cpu_count(), (len(files) + chunksize - 1) // chunksize, ) diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index 2355d492..55a57247 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -137,13 +137,11 @@ class MagicArgsCodemodCommand(CodemodCommand, ABC): """ ... - def _instantiate(self, transform: Type[Codemod]) -> Codemod: + def _instantiate(self, transform: Type[_Codemod]) -> _Codemod: # Grab the expected arguments argspec = inspect.getfullargspec(transform.__init__) args: List[object] = [] kwargs: Dict[str, object] = {} - # pyre-fixme[6]: Expected `Sized` for 1st param but got `Union[Tuple[], - # Tuple[Any, ...]]`. 
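The `_type_enforce` change above replaces `isinstance(value, Tuple)` with `isinstance(value, tuple)`: at runtime `typing.Tuple` is only an alias of the builtin class, so the builtin is the right thing to check against, and tuple subclasses such as NamedTuple still pass that check. A short illustration:

```
from typing import NamedTuple


class Pt(NamedTuple):
    x: int
    y: int


assert isinstance((1, 2), tuple)
assert isinstance(Pt(1, 2), tuple)  # NamedTuple instances are tuples too
```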
last_default_arg = len(argspec.args) - len(argspec.defaults or ()) for i, arg in enumerate(argspec.args): if arg in ["self", "context"]: diff --git a/libcst/codemod/_testing.py b/libcst/codemod/_testing.py index c727acbf..fb4d85c2 100644 --- a/libcst/codemod/_testing.py +++ b/libcst/codemod/_testing.py @@ -98,6 +98,7 @@ class _CodemodTest: """ context = context_override if context_override is not None else CodemodContext() + # pyre-fixme[45]: Cannot instantiate abstract class `Codemod`. transform_instance = self.TRANSFORM(context, *args, **kwargs) input_tree = parse_module( CodemodTest.make_fixture_data(before), diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index bebff329..a74b5342 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -223,8 +223,8 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): DESCRIPTION: str = "Converts instances of str.format() to f-string." @staticmethod - def add_args(parser: argparse.ArgumentParser) -> None: - parser.add_argument( + def add_args(arg_parser: argparse.ArgumentParser) -> None: + arg_parser.add_argument( "--allow-strip-comments", dest="allow_strip_comments", help=( @@ -233,7 +233,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): ), action="store_true", ) - parser.add_argument( + arg_parser.add_argument( "--allow-await", dest="allow_await", help=( diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index 4ba1e753..9908a5b6 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -3,8 +3,9 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # +import itertools import re -from typing import Callable, cast +from typing import Callable, cast, List, Sequence import libcst as cst import libcst.matchers as m @@ -73,8 +74,10 @@ class ConvertPercentFormatStringCommand(VisitorBasedCodemodCommand): extracts = m.extract( original_node, m.BinaryOperation( + # pyre-fixme[6]: Expected `Union[m._matcher_base.AllOf[typing.Union[m... left=m.MatchIfTrue(_match_simple_string), operator=m.Modulo(), + # pyre-fixme[6]: Expected `Union[m._matcher_base.AllOf[typing.Union[m... 
right=m.SaveMatchedNode( m.MatchIfTrue(_gen_match_simple_expression(self.module)), expr_key, @@ -83,7 +86,8 @@ class ConvertPercentFormatStringCommand(VisitorBasedCodemodCommand): ) if extracts: - expr = extracts[expr_key] + exprs = extracts[expr_key] + exprs = (exprs,) if not isinstance(exprs, Sequence) else exprs parts = [] simple_string = cst.ensure_type(original_node.left, cst.SimpleString) innards = simple_string.raw_value.replace("{", "{{").replace("}", "}}") @@ -91,10 +95,13 @@ class ConvertPercentFormatStringCommand(VisitorBasedCodemodCommand): token = tokens[0] if len(token) > 0: parts.append(cst.FormattedStringText(value=token)) - expressions = ( - [elm.value for elm in expr.elements] - if isinstance(expr, cst.Tuple) - else [expr] + expressions: List[cst.CSTNode] = list( + *itertools.chain( + [elm.value for elm in expr.elements] + if isinstance(expr, cst.Tuple) + else [expr] + for expr in exprs + ) ) escape_transformer = EscapeStringQuote(simple_string.quote) i = 1 diff --git a/libcst/codemod/commands/ensure_import_present.py b/libcst/codemod/commands/ensure_import_present.py index eaf16123..a97fddfd 100644 --- a/libcst/codemod/commands/ensure_import_present.py +++ b/libcst/codemod/commands/ensure_import_present.py @@ -18,8 +18,8 @@ class EnsureImportPresentCommand(MagicArgsCodemodCommand): ) @staticmethod - def add_args(parser: argparse.ArgumentParser) -> None: - parser.add_argument( + def add_args(arg_parser: argparse.ArgumentParser) -> None: + arg_parser.add_argument( "--module", dest="module", metavar="MODULE", @@ -27,7 +27,7 @@ class EnsureImportPresentCommand(MagicArgsCodemodCommand): type=str, required=True, ) - parser.add_argument( + arg_parser.add_argument( "--entity", dest="entity", metavar="ENTITY", @@ -38,7 +38,7 @@ class EnsureImportPresentCommand(MagicArgsCodemodCommand): type=str, default=None, ) - parser.add_argument( + arg_parser.add_argument( "--alias", dest="alias", metavar="ALIAS", diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index cd0a417f..4b525ab3 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -42,15 +42,15 @@ class RenameCommand(VisitorBasedCodemodCommand): METADATA_DEPENDENCIES = (QualifiedNameProvider,) @staticmethod - def add_args(parser: argparse.ArgumentParser) -> None: - parser.add_argument( + def add_args(arg_parser: argparse.ArgumentParser) -> None: + arg_parser.add_argument( "--old_name", dest="old_name", required=True, help="Full dotted name of object to rename. Eg: `foo.bar.baz`", ) - parser.add_argument( + arg_parser.add_argument( "--new_name", dest="new_name", required=True, diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 89897fb5..248d3838 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -107,13 +107,13 @@ class AddImportsVisitor(ContextAwareTransformer): # Allow for instantiation from either a context (used when multiple transforms # get chained) or from a direct instantiation. 
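For context on the matcher calls in the convert_percent_format_to_fstring hunks above, here is a minimal standalone sketch of `m.extract` and of the node-or-sequence normalization it forces on callers; it is not part of the codemod itself:

    import libcst as cst
    import libcst.matchers as m

    node = cst.parse_expression('"%s!" % name')
    extracts = m.extract(
        node,
        m.BinaryOperation(
            left=m.MatchIfTrue(lambda n: isinstance(n, cst.SimpleString)),
            operator=m.Modulo(),
            right=m.SaveMatchedNode(m.Name(), "expr"),
        ),
    )
    if extracts is not None:
        expr = extracts["expr"]
        # Saved values may be a single node or a sequence of nodes, so callers
        # normalize before iterating, as the codemod above does.
        exprs = expr if isinstance(expr, (list, tuple)) else (expr,)
        print([type(e).__name__ for e in exprs])  # ['Name']
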
super().__init__(context) - imports: List[Tuple[str, Optional[str], Optional[str]]] = [ + imps: List[Tuple[str, Optional[str], Optional[str]]] = [ *AddImportsVisitor._get_imports_from_context(context), *imports, ] # Verify that the imports are valid - for module, obj, alias in imports: + for module, obj, alias in imps: if module == "__future__" and obj is None: raise Exception("Cannot import __future__ directly!") if module == "__future__" and alias is not None: @@ -121,21 +121,17 @@ class AddImportsVisitor(ContextAwareTransformer): # List of modules we need to ensure are imported self.module_imports: Set[str] = { - module for (module, obj, alias) in imports if obj is None and alias is None + module for (module, obj, alias) in imps if obj is None and alias is None } # List of modules we need to check for object imports on from_imports: Set[str] = { - module - for (module, obj, alias) in imports - if obj is not None and alias is None + module for (module, obj, alias) in imps if obj is not None and alias is None } # Mapping of modules we're adding to the object they should import self.module_mapping: Dict[str, Set[str]] = { module: { - o - for (m, o, n) in imports - if m == module and o is not None and n is None + o for (m, o, n) in imps if m == module and o is not None and n is None } for module in sorted(from_imports) } @@ -143,20 +139,20 @@ class AddImportsVisitor(ContextAwareTransformer): # List of aliased modules we need to ensure are imported self.module_aliases: Dict[str, str] = { module: alias - for (module, obj, alias) in imports + for (module, obj, alias) in imps if obj is None and alias is not None } # List of modules we need to check for object imports on from_imports_aliases: Set[str] = { module - for (module, obj, alias) in imports + for (module, obj, alias) in imps if obj is not None and alias is not None } # Mapping of modules we're adding to the object with alias they should import self.alias_mapping: Dict[str, List[Tuple[str, str]]] = { module: [ (o, n) - for (m, o, n) in imports + for (m, o, n) in imps if m == module and o is not None and n is not None ] for module in sorted(from_imports_aliases) @@ -357,9 +353,9 @@ class AddImportsVisitor(ContextAwareTransformer): module: sorted(aliases) for module, aliases in module_and_alias_mapping.items() } - # import ptvsd; ptvsd.set_trace() # Now, add all of the imports we need! return updated_node.with_changes( + # pyre-fixme[60]: Concatenation not yet support for multiple variadic tup... body=( *statements_before_imports, *[ diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index c2101733..d6211509 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -34,6 +34,10 @@ class GatherUnusedImportsVisitor(ContextAwareVisitor): parent import node. """ + # pyre-fixme[8]: Attribute has type + # `Tuple[typing.Type[cst.metadata.base_provider.BaseMetadataProvider[object]]]`; + # used as `Tuple[typing.Type[cst.metadata.name_provider.QualifiedNameProvider], + # typing.Type[cst.metadata.scope_provider.ScopeProvider]]`. 
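The AddImportsVisitor changes above only rename the local `imports` binding to avoid shadowing the constructor argument; the public API is unchanged. A minimal usage sketch (the imported modules are arbitrary examples):

    import libcst as cst
    from libcst.codemod import CodemodContext
    from libcst.codemod.visitors import AddImportsVisitor

    context = CodemodContext()
    # Imports queued on the context are merged into the visitor's own list in
    # the constructor shown above, so chained transforms share one import pass.
    AddImportsVisitor.add_needed_import(context, "typing", "Optional")
    AddImportsVisitor.add_needed_import(context, "collections")

    module = cst.parse_module("x = 1\n")
    print(AddImportsVisitor(context).transform_module(module).code)
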
METADATA_DEPENDENCIES: Tuple[ProviderT] = ( *GatherNamesFromStringAnnotationsVisitor.METADATA_DEPENDENCIES, ScopeProvider, diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 80ed335d..c5fb7f9e 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -7,7 +7,7 @@ import sys import textwrap import unittest -from typing import Dict, Type +from typing import Type from libcst import parse_module from libcst.codemod import Codemod, CodemodContext, CodemodTest @@ -38,13 +38,18 @@ class TestApplyAnnotationsVisitor(CodemodTest): stub: str, before: str, after: str, - **kwargs: Dict[str, bool], + **kwargs: bool, ) -> None: context = CodemodContext() ApplyTypeAnnotationsVisitor.store_stub_in_context( context, parse_module(textwrap.dedent(stub.rstrip())) ) # Test setting the flag on the codemod instance. + # pyre-fixme[6]: Expected `Optional[typing.Sequence[str]]` for 4th param but + # got `Dict[str, bool]`. + # pyre-fixme[6]: Expected `Optional[str]` for 4th param but got `Dict[str, + # bool]`. + # pyre-fixme[6]: Expected `bool` for 4th param but got `Dict[str, bool]`. self.assertCodemod(before, after, context_override=context, **kwargs) # Test setting the flag when storing the stub in the context. @@ -858,6 +863,8 @@ class TestApplyAnnotationsVisitor(CodemodTest): ), } ) + # pyre-fixme[56]: Pyre was not able to infer the type of argument + # `sys.version_info < (3, 8)` to decorator factory `unittest.skipIf`. @unittest.skipIf(sys.version_info < (3, 8), "Unsupported Python version") def test_annotate_functions_py38(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) @@ -1094,8 +1101,8 @@ class TestApplyAnnotationsVisitor(CodemodTest): before: str, after: str, annotation_counts: AnnotationCounts, - any_changes_applied: False, - ): + any_changes_applied: bool, + ) -> None: stub = self.make_fixture_data(stub) before = self.make_fixture_data(before) after = self.make_fixture_data(after) diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 7ae9f7d7..50e42ff7 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -28,4 +28,6 @@ def insert_header_comments(node: libcst.Module, comments: List[str]) -> libcst.M inserted_lines = [ libcst.EmptyLine(comment=libcst.Comment(value=comment)) for comment in comments ] + # pyre-fixme[60]: Concatenation not yet support for multiple variadic tuples: + # `*comment_lines, *inserted_lines, *empty_lines`. return node.with_changes(header=(*comment_lines, *inserted_lines, *empty_lines)) diff --git a/libcst/matchers/_decorators.py b/libcst/matchers/_decorators.py index 68d1eb4e..c5cde6ea 100644 --- a/libcst/matchers/_decorators.py +++ b/libcst/matchers/_decorators.py @@ -18,6 +18,7 @@ CONSTRUCTED_LEAVE_MATCHER_ATTR: str = "_leave_matcher" def call_if_inside( matcher: BaseMatcherNode, + # pyre-fixme[34]: `Variable[_CSTVisitFuncT]` isn't present in the function's parameters. ) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT]: """ A decorator for visit and leave methods inside a :class:`MatcherDecoratableTransformer` @@ -43,6 +44,7 @@ def call_if_inside( def call_if_not_inside( matcher: BaseMatcherNode, + # pyre-fixme[34]: `Variable[_CSTVisitFuncT]` isn't present in the function's parameters. 
) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT]: """ A decorator for visit and leave methods inside a :class:`MatcherDecoratableTransformer` @@ -67,6 +69,7 @@ def call_if_not_inside( return inner +# pyre-fixme[34]: `Variable[_CSTVisitFuncT]` isn't present in the function's parameters. def visit(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT]: """ A decorator that allows a method inside a :class:`MatcherDecoratableTransformer` @@ -97,6 +100,7 @@ def visit(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT return inner +# pyre-fixme[34]: `Variable[_CSTVisitFuncT]` isn't present in the function's parameters. def leave(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT]: """ A decorator that allows a method inside a :class:`MatcherDecoratableTransformer` diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index e76877de..64670be4 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -69,6 +69,8 @@ class AbstractBaseMatcherNodeMeta(ABCMeta): matcher. """ + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, node: Type["BaseMatcherNode"]) -> "TypeOf[Type[BaseMatcherNode]]": return TypeOf(self, node) @@ -82,6 +84,8 @@ class BaseMatcherNode: several concrete matchers as options. """ + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self: _BaseMatcherNodeSelfT, other: _OtherNodeT ) -> "OneOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]]": @@ -176,11 +180,15 @@ class TypeOf(Generic[_MatcherTypeT], BaseMatcherNode): self._call_items = (args, kwargs) return self + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeMatcherTypeT ) -> "TypeOf[Union[_MatcherTypeT, _OtherNodeMatcherTypeT]]": return TypeOf[Union[_MatcherTypeT, _OtherNodeMatcherTypeT]](self, other) + # pyre-fixme[14]: `__and__` overrides method defined in `BaseMatcherNode` + # inconsistently. def __and__(self, other: _OtherNodeMatcherTypeT) -> NoReturn: left, right = type(self).__name__, other.__name__ raise TypeError( @@ -232,6 +240,8 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): """ return self._options + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] # even though it has the types passed into it. @@ -306,6 +316,8 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): """ return self._options + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> NoReturn: raise Exception("Cannot use AllOf and OneOf in combination!") @@ -353,6 +365,8 @@ class _InverseOf(Generic[_MatcherT]): """ return self._matcher + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. 
def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] # even though it has the types passed into it. @@ -422,6 +436,8 @@ class _ExtractMatchingNode(Generic[_MatcherT]): """ return self._name + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] # even though it has the types passed into it. @@ -494,6 +510,8 @@ class MatchIfTrue(Generic[_MatchIfTrueT]): """ return self._func + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeT ) -> "OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": @@ -518,7 +536,6 @@ class MatchIfTrue(Generic[_MatchIfTrueT]): return MatchIfTrue(lambda val: not self._func(val)) def __repr__(self) -> str: - # pyre-ignore Pyre doesn't believe that functions have a repr. return f"MatchIfTrue({repr(self._func)})" @@ -620,6 +637,8 @@ class MatchMetadata(_BaseMetadataMatcher): """ return self._value + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[MatchMetadata, _OtherNodeT]]": # Without the cast, pyre doesn't know this is valid return cast(OneOf[Union[MatchMetadata, _OtherNodeT]], OneOf(self, other)) @@ -702,6 +721,8 @@ class MatchMetadataIfTrue(_BaseMetadataMatcher): """ return self._func + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeT ) -> "OneOf[Union[MatchMetadataIfTrue, _OtherNodeT]]": @@ -719,7 +740,6 @@ class MatchMetadataIfTrue(_BaseMetadataMatcher): return MatchMetadataIfTrue(self._key, lambda val: not self._func(val)) def __repr__(self) -> str: - # pyre-ignore Pyre doesn't believe that functions have a repr. return f"MatchMetadataIfTrue(key={repr(self._key)}, func={repr(self._func)})" @@ -790,6 +810,8 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): """ return self._matcher + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: raise Exception("AtLeastN cannot be used in a OneOf matcher") @@ -892,6 +914,8 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): """ return self._matcher + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. + # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: raise Exception("AtMostN cannot be used in a OneOf matcher") @@ -980,7 +1004,6 @@ def DoesNotMatch(obj: _OtherNodeT) -> _OtherNodeT: ): # We can use the overridden __invert__ in this case. Pyre doesn't think # we can though, and casting doesn't fix the issue. - # pyre-ignore All three types above have overridden __invert__. inverse = ~obj else: # We must wrap in a _InverseOf. 
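The `__or__` overrides being annotated above are what let `|` combine matchers into a `OneOf`. A small standalone sketch:

    import libcst as cst
    import libcst.matchers as m

    # Matches either a bare name ``foo`` or an attribute access ending in ``.foo``.
    either_foo = m.Name("foo") | m.Attribute(attr=m.Name("foo"))

    print(m.matches(cst.parse_expression("foo"), either_foo))      # True
    print(m.matches(cst.parse_expression("obj.foo"), either_foo))  # True
    print(m.matches(cst.parse_expression("bar"), either_foo))      # False
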
@@ -1065,6 +1088,7 @@ def _sequence_matches( # noqa: C901 # Base case, we have one or more matcher that wasn't matched if all(_matches_zero_nodes(m) for m in matchers): return _SequenceMatchesResult( + # pyre-ignore[16]: `MatchIfTrue` has no attribute `name`. {m.name: () for m in matchers if isinstance(m, _ExtractMatchingNode)}, (), ) @@ -1100,9 +1124,12 @@ def _sequence_matches( # noqa: C901 metadata_lookup, ) if result.sequence_capture is not None: + matched = result.matched_nodes + assert isinstance(matched, Sequence) return _SequenceMatchesResult( {**attribute_capture, **result.sequence_capture}, - (node, *result.matched_nodes), + # pyre-fixme[6]: Expected `Union[None, Sequence[libcst._n... + (node, *matched), ) # Finally, assume that this does not match the current node. # Consume the matcher but not the node. @@ -1126,9 +1153,12 @@ def _sequence_matches( # noqa: C901 metadata_lookup, ) if result.sequence_capture is not None: + matched = result.matched_nodes + assert isinstance(matched, Sequence) return _SequenceMatchesResult( {**attribute_capture, **result.sequence_capture}, - (node, *result.matched_nodes), + # pyre-fixme[6]: Expected `Union[None, Sequence[libcst._n... + (node, *matched), ) return _SequenceMatchesResult(None, None) else: @@ -1140,9 +1170,12 @@ def _sequence_matches( # noqa: C901 if attribute_capture is not None: result = _sequence_matches(nodes[1:], matchers, metadata_lookup) if result.sequence_capture is not None: + matched = result.matched_nodes + assert isinstance(matched, Sequence) return _SequenceMatchesResult( {**attribute_capture, **result.sequence_capture}, - (node, *result.matched_nodes), + # pyre-fixme[6]: Expected `Union[None, Sequence[libcst._n... + (node, *matched), ) # Now, assume that this does not match the current node. # Consume the matcher but not the node. @@ -1235,9 +1268,7 @@ def _attribute_matches( # noqa: C901 if isinstance(node, collections.abc.Sequence): # Given we've generated the types for matchers based on LibCST, we know that # this is true unless the node is badly constructed and types were ignored. - node = cast( - Sequence[Union[MaybeSentinel, RemovalSentinel, libcst.CSTNode]], node - ) + node = cast(Sequence[Union[MaybeSentinel, libcst.CSTNode]], node) if isinstance(matcher, OneOf): # We should compare against each of the sequences in the OneOf @@ -1248,7 +1279,8 @@ def _attribute_matches( # noqa: C901 if result.sequence_capture is not None: return result.sequence_capture elif isinstance(m, MatchIfTrue): - return {} if matcher.func(node) else None + # TODO: return captures + return {} if m.func(node) else None elif isinstance(matcher, AllOf): # We should compare against each of the sequences in the AllOf all_captures = {} @@ -1259,8 +1291,6 @@ def _attribute_matches( # noqa: C901 if result.sequence_capture is None: return None all_captures = {**all_captures, **result.sequence_capture} - elif isinstance(m, MatchIfTrue): - return {} if matcher.func(node) else None else: # The value in the AllOf wasn't a sequence, it can't match. return None @@ -1294,7 +1324,8 @@ def _attribute_matches( # noqa: C901 # so the only way it is wrong is if the node was badly constructed and # types were ignored. return _matches( - cast(Union[MaybeSentinel, RemovalSentinel, libcst.CSTNode], node), + cast(Union[MaybeSentinel, libcst.CSTNode], node), + # pyre-fixme[24]: Generic type `MatchIfTrue` expects 1 type parameter. 
cast(Union[BaseMatcherNode, MatchIfTrue, _BaseMetadataMatcher], matcher), metadata_lookup, ) @@ -1784,8 +1815,6 @@ class _ReplaceTransformer(libcst.CSTTransformer): ] if inspect.isfunction(replacement): - # pyre-ignore Pyre knows replacement is a function, but somehow drops - # the type hint from the init signature. self.replacement = replacement elif isinstance(replacement, (MaybeSentinel, RemovalSentinel)): self.replacement = lambda node, matches: copy.deepcopy(replacement) diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index fe95b385..ded6eb9d 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -126,6 +126,7 @@ def _verify_return_annotation( # it is "None". if type_hints.get("return", type(None)) is not type(None): # noqa: E721 raise MatchDecoratorMismatch( + # pyre-fixme[16]: Anonymous callable has no attribute `__qualname__`. meth.__qualname__, f"@{decorator_name} should only decorate functions that do " + "not return.", @@ -174,6 +175,7 @@ def _verify_parameter_annotations( meth_signature = signature(meth) if len(meth_signature.parameters) != expected_param_count: raise MatchDecoratorMismatch( + # pyre-fixme[16]: Anonymous callable has no attribute `__qualname__`. meth.__qualname__, f"@{decorator_name} should decorate functions which take " + f"{expected_param_count} parameter" @@ -230,6 +232,8 @@ def _check_types( # First thing first, make sure this isn't wrapping an inner class. if not ismethod(meth): raise MatchDecoratorMismatch( + # pyre-fixme[16]: Anonymous callable has no attribute + # `__qualname__`. meth.__qualname__, "Matcher decorators should only be used on methods of " + "MatcherDecoratableTransformer or " @@ -237,7 +241,6 @@ def _check_types( ) if has_invalid_top_level: raise MatchDecoratorMismatch( - # pyre-ignore This anonymous method has a qualname. meth.__qualname__, "The root matcher in a matcher decorator cannot be an " + "AtLeastN, AtMostN or MatchIfTrue matcher", @@ -314,6 +317,10 @@ def _gather_constructed_visit_funcs( _assert_not_concrete("visit", func) for matcher in matchers: casted_matcher = cast(BaseMatcherNode, matcher) + # pyre-fixme[6]: Expected + # `Sequence[typing.Callable[[cst._nodes.base.CSTNode], None]]` for 2nd + # param but got `Tuple[*Tuple[(CSTNode) -> None, ...], (CSTNode) -> + # None]`. constructed_visitors[casted_matcher] = ( *constructed_visitors.get(casted_matcher, ()), func, @@ -349,6 +356,10 @@ def _gather_constructed_leave_funcs( _assert_not_concrete("leave", func) for matcher in matchers: casted_matcher = cast(BaseMatcherNode, matcher) + # pyre-fixme[6]: Expected + # `Sequence[typing.Callable[[cst._nodes.base.CSTNode], None]]` for 2nd + # param but got `Tuple[*Tuple[(CSTNode) -> None, ...], (CSTNode) -> + # None]`. 
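The `_gather_constructed_visit_funcs` machinery above collects methods registered through the matcher decorators at class-construction time. A minimal sketch of the pattern it supports (the class and method names are illustrative):

    import libcst as cst
    import libcst.matchers as m

    class FStringFinder(m.MatcherDecoratableVisitor):
        def __init__(self) -> None:
            super().__init__()
            self.count = 0

        # A "constructed" visit function: not named visit_X, registered via @m.visit.
        @m.visit(m.FormattedString())
        def _record(self, node: cst.FormattedString) -> None:
            self.count += 1

    finder = FStringFinder()
    cst.parse_module('x = f"{a}"\ny = "plain"\n').visit(finder)
    print(finder.count)  # 1
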
constructed_visitors[casted_matcher] = ( *constructed_visitors.get(casted_matcher, ()), func, diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 0e87063c..0a64c5a8 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -262,7 +262,7 @@ class ImportAssignment(Assignment): node: cst.CSTNode, index: int, as_name: cst.CSTNode, - ): + ) -> None: super().__init__(name, scope, node, index) self.as_name = as_name @@ -280,6 +280,7 @@ class ImportAssignment(Assignment): def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: module = self.get_module_name_for_import() results = set() + assert isinstance(self.node, (cst.ImportFrom, cst.Import)) import_names = self.node.names if not isinstance(import_names, cst.ImportStar): for name in import_names: diff --git a/libcst/metadata/tests/test_metadata_wrapper.py b/libcst/metadata/tests/test_metadata_wrapper.py index 72b09b04..ee61e14f 100644 --- a/libcst/metadata/tests/test_metadata_wrapper.py +++ b/libcst/metadata/tests/test_metadata_wrapper.py @@ -70,9 +70,6 @@ class MetadataWrapperTest(UnitTest): self.set_metadata(node, self.cache) cached_data = object() - # pyre-fixme[6]: Expected `Mapping[Type[BaseMetadataProvider[object]], - # object]` for 2nd param but got `Dict[Type[SimpleCacheMetadataProvider], - # object]`. mw = MetadataWrapper(m, cache={SimpleCacheMetadataProvider: cached_data}) pass_node = cst.ensure_type(mw.module.body[0], cst.SimpleStatementLine).body[0] self.assertEqual( diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index 072a7405..30a6acd8 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -37,11 +37,6 @@ def get_qualified_names(module_str: str) -> Set[QualifiedName]: def get_fully_qualified_names(file_path: str, module_str: str) -> Set[QualifiedName]: wrapper = cst.MetadataWrapper( cst.parse_module(dedent(module_str)), - # pyre-fixme[6]: Incompatible parameter type [6]: Expected - # `typing.Mapping[typing.Type[cst.metadata.base_provider.BaseMetadataProvider[ - # object]], object]` for 2nd parameter `cache` to call - # `cst.metadata.wrapper.MetadataWrapper.__init__` but got - # `typing.Dict[typing.Type[FullyQualifiedNameProvider], object]` cache={ FullyQualifiedNameProvider: FullyQualifiedNameProvider.gen_cache( Path(""), [file_path], None diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 1f007200..4e65de62 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -205,7 +205,13 @@ class ScopeProviderTest(UnitTest): import_node, f"The node of ImportAssignment {assignment.node} should equal to {import_node}", ) - alias = import_node.names[0] + self.assertTrue(isinstance(import_node, (cst.Import, cst.ImportFrom))) + + names = import_node.names + + self.assertFalse(isinstance(names, cst.ImportStar)) + + alias = names[0] as_name = alias.asname.name if alias.asname else alias.name self.assertEqual( assignment.as_name, @@ -323,7 +329,13 @@ class ScopeProviderTest(UnitTest): f"The node of ImportAssignment {import_assignment.node} should equal to {import_node}", ) - alias = import_node.names[imported_object_idx] + self.assertTrue(isinstance(import_node, (cst.Import, cst.ImportFrom))) + + names = import_node.names + + self.assertFalse(isinstance(names, cst.ImportStar)) + + alias = names[imported_object_idx] as_name = alias.asname.name 
if alias.asname else alias.name self.assertEqual( import_assignment.as_name, @@ -1761,7 +1773,7 @@ class ScopeProviderTest(UnitTest): for acc in scope.accesses: self.assertEqual( len(acc.referents), - 1 if getattr(acc.node, "value") == "x" else 0, + 1 if getattr(acc.node, "value", None) == "x" else 0, msg=( "Access for node has incorrect number of referents: " + f"{acc.node}" diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index ad1f4471..c52a7c8e 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -58,7 +58,7 @@ def _test_simple_class_helper(test: UnitTest, wrapper: MetadataWrapper) -> None: @skipIf(sys.platform == "win32", "TypeInferenceProvider doesn't support windows") class TypeInferenceProviderTest(UnitTest): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: os.chdir(TEST_SUITE_PATH) try: subprocess.run(["pyre", "-n", "start", "--no-watchman"]) @@ -66,7 +66,7 @@ class TypeInferenceProviderTest(UnitTest): raise exc @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: try: subprocess.run(["pyre", "-n", "stop"], cwd=TEST_SUITE_PATH) except subprocess.TimeoutExpired as exc: @@ -80,7 +80,7 @@ class TypeInferenceProviderTest(UnitTest): root_path=source_path.parent, paths=[source_path.name], timeout=None ) data: PyreData = json.loads(data_path.read_text()) - self.assertEqual(cache[source_path.name], data) + self.assertEqual(data, cache[source_path.name]) @data_provider( ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) @@ -89,9 +89,6 @@ class TypeInferenceProviderTest(UnitTest): data: PyreData = json.loads(data_path.read_text()) wrapper = MetadataWrapper( cst.parse_module(source_path.read_text()), - # pyre-fixme[6]: Expected `Mapping[Type[BaseMetadataProvider[object]], - # Any]` for 2nd param but got `Dict[Type[TypeInferenceProvider], - # Sequence[InferredType]]`. cache={TypeInferenceProvider: data}, ) _test_simple_class_helper(self, wrapper) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index da32e92c..340d1c51 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -53,6 +53,8 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): METADATA_DEPENDENCIES = (PositionProvider,) @staticmethod + # pyre-fixme[40]: Static method `gen_cache` cannot override a non-static method + # defined in `cst.metadata.base_provider.BaseMetadataProvider`. def gen_cache( root_path: Path, paths: List[str], timeout: Optional[int] ) -> Mapping[str, object]: diff --git a/libcst/metadata/wrapper.py b/libcst/metadata/wrapper.py index d8e9f0fb..a9a712ca 100644 --- a/libcst/metadata/wrapper.py +++ b/libcst/metadata/wrapper.py @@ -178,7 +178,6 @@ class MetadataWrapper: else: metadata = self.resolve_many([provider])[provider] - # pyre-ignore Pyre doesn't recognize "CSTNode" in this contxt. return cast(Mapping["CSTNode", _T], metadata) def resolve_many( diff --git a/libcst/testing/utils.py b/libcst/testing/utils.py index 07ef26e9..f2557b66 100644 --- a/libcst/testing/utils.py +++ b/libcst/testing/utils.py @@ -133,7 +133,7 @@ def validate_provider_tests(dct: Dict[str, Any]) -> None: + "these combinations." 
) - test_replacement.__name__ = member_name + setattr(test_replacement, "__name__", member_name) members_to_replace[member_name] = test_replacement for member_name, new_member in members_to_replace.items(): diff --git a/libcst/tests/pyre/.pyre_configuration b/libcst/tests/pyre/.pyre_configuration index e6deea0f..c3018faf 100644 --- a/libcst/tests/pyre/.pyre_configuration +++ b/libcst/tests/pyre/.pyre_configuration @@ -2,6 +2,5 @@ "source_directories": [ "." ], - "search_path": [], - "workers": 1 + "search_path": [] } diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index 21b48333..288bb567 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -60,7 +60,7 @@ }, "stop": { "line": 11, - "column": 24 + "column": 29 } }, "annotation": "int" @@ -190,7 +190,7 @@ }, "stop": { "line": 16, - "column": 25 + "column": 30 } }, "annotation": "int" diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 266a3933..021385af 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -63,12 +63,7 @@ class TypeVerificationVisitor(cst.CSTVisitor): end = pos.end tup = (start.line, start.column, end.line, end.column) # remove this if condition when the type issues are fixed. - if not any( - node.deep_equals(name) and tup == _tup - for (name, _tup) in { - (cst.Name("i"), (17, 21, 17, 22)), - } - ): + if node.value not in {"n", "i"}: self.test.assertIn( tup, self.lookup, @@ -95,6 +90,10 @@ class TypeVerificationVisitor(cst.CSTVisitor): class PyreIntegrationTest(UnitTest): + # pyre-fixme[56]: Pyre was not able to infer the type of argument + # `comprehension((source_path, data_path) for generators(generator((source_path, + # data_path) in zip(TEST_SUITE_PATH.glob("*.py"), TEST_SUITE_PATH.glob("*.json")) + # if )))` to decorator factory `libcst.testing.utils.data_provider`. @data_provider( ( (source_path, data_path) diff --git a/pyproject.toml b/pyproject.toml index 3f372901..1d33e75e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,4 +6,4 @@ exclude = "native/.*" excludes = ["native/", "stubs/"] [build-system] -requires = ["setuptools", "wheel", "setuptools-rust"] +requires = ["setuptools", "wheel", "setuptools-rust"] \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 3c4afca7..47164281 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -9,7 +9,7 @@ jupyter>=1.0.0 maturin>=0.8.3,<0.9 nbsphinx>=0.4.2 prompt-toolkit>=2.0.9 -pyre-check==0.9.3 +pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 tox>=3.18.1 diff --git a/stubs/libcst/native.pyi b/stubs/libcst/native.pyi new file mode 100644 index 00000000..4741266b --- /dev/null +++ b/stubs/libcst/native.pyi @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Optional +import libcst + +def parse_module(source: str, encoding: Optional[str]) -> libcst.Module: ... +def parse_expression(source: str) -> libcst.BaseExpression: ... +def parse_statement(source: str) -> libcst.BaseStatement: ... diff --git a/stubs/tokenize.pyi b/stubs/tokenize.pyi index 2db19c04..32347ccc 100644 --- a/stubs/tokenize.pyi +++ b/stubs/tokenize.pyi @@ -82,7 +82,6 @@ Comment: str = ... Ignore: str = ... Name: str = ... 
-# pyre-ignore Pyre doesn't like that we're subclassing from tuple here class TokenInfo(Tuple[int, str, Tuple[int, int], Tuple[int, int], int]): exact_type: int = ... type: int = ... From 601db54880bd19031aaddd889ff8864d646b5984 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 5 Jan 2022 12:24:31 -0800 Subject: [PATCH 175/632] Add instructions to codegen test failures (#582) * Add instructions to codegen test failures * Run ufmt * Fix lint errors, mention tox -e codegen --- libcst/codegen/tests/test_codegen_clean.py | 25 ++++++++++++++-------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index b346570d..b26a85cd 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -14,6 +14,19 @@ from libcst.testing.utils import UnitTest class TestCodegenClean(UnitTest): + def assert_code_matches( + self, + old_code: str, + new_code: str, + module_name: str, + ) -> None: + self.assertTrue( + old_code == new_code, + f"{module_name} needs new codegen, see " + + "`python -m libcst.codegen.generate --help` " + + "for instructions, or run `tox -e codegen`", + ) + def test_codegen_clean_visitor_functions(self) -> None: """ Verifies that codegen of visitor functions would not result in a @@ -44,9 +57,7 @@ class TestCodegenClean(UnitTest): old_code = fp.read() # Now that we've done simple codegen, verify that it matches. - self.assertTrue( - old_code == new_code, "libcst._typed_visitor needs new codegen!" - ) + self.assert_code_matches(old_code, new_code, "libcst._typed_visitor") def test_codegen_clean_matcher_classes(self) -> None: """ @@ -78,9 +89,7 @@ class TestCodegenClean(UnitTest): old_code = fp.read() # Now that we've done simple codegen, verify that it matches. - self.assertTrue( - old_code == new_code, "libcst.matchers.__init__ needs new codegen!" - ) + self.assert_code_matches(old_code, new_code, "libcst.matchers.__init__") def test_codegen_clean_return_types(self) -> None: """ @@ -113,6 +122,4 @@ class TestCodegenClean(UnitTest): old_code = fp.read() # Now that we've done simple codegen, verify that it matches. - self.assertTrue( - old_code == new_code, "libcst.matchers._return_types needs new codegen!" - ) + self.assert_code_matches(old_code, new_code, "libcst.matchers._return_types") From 3578f2fc3d60ce9dfda4e1c38e712e9c3c30bfbd Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 5 Jan 2022 13:26:51 -0800 Subject: [PATCH 176/632] Add some color to children-vs-codegen error (#583) It took me some time to track down the root cause of `children` not matching codegen, having the error message directly hint that visit and codegen are probably mimatched (my visit was running out-of-order) will likely help newbies get going faster. --- libcst/_nodes/tests/base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libcst/_nodes/tests/base.py b/libcst/_nodes/tests/base.py index 3ac22890..8245e301 100644 --- a/libcst/_nodes/tests/base.py +++ b/libcst/_nodes/tests/base.py @@ -137,7 +137,9 @@ class CSTNodeTest(UnitTest): codegen_children, msg=( "The list of children we got from `node.children` differs from the " - + "children that were visited by `node._codegen`." + + "children that were visited by `node._codegen`. This is probably " + + "due to a mismatch between _visit_and_replace_children and " + + "_codegen_impl." 
), ) From 6615ccb0ce66422e03a1efa7bbad46f80d4fdc90 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 6 Jan 2022 15:56:14 +0000 Subject: [PATCH 177/632] pin to git rev (#587) --- native/Cargo.lock | 6 +++--- native/libcst/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 92b17afe..fbeaeb78 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -451,7 +451,7 @@ dependencies = [ [[package]] name = "peg" version = "0.7.0" -source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" dependencies = [ "peg-macros", "peg-runtime", @@ -460,7 +460,7 @@ dependencies = [ [[package]] name = "peg-macros" version = "0.7.0" -source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" dependencies = [ "peg-runtime", "proc-macro2", @@ -470,7 +470,7 @@ dependencies = [ [[package]] name = "peg-runtime" version = "0.7.0" -source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" [[package]] name = "plotters" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 04b8013c..2247abf3 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -30,7 +30,7 @@ trace = ["peg/trace"] paste = "1.0.4" pyo3 = "0.14.4" thiserror = "1.0.23" -peg = { git = "https://github.com/kevinmehall/rust-peg" } +peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "4b146b4b78a80c07e43d7ace2d97f65bfde279a8" } chic = "1.2.2" itertools = "0.10.0" once_cell = "1.5.2" From 2f7f174daa001481af8be0d4a5e5de0f2fad542f Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 6 Jan 2022 11:26:12 -0500 Subject: [PATCH 178/632] Remove setup matrix (#586) --- .github/workflows/build.yml | 66 +++++++++++++------------------------ 1 file changed, 22 insertions(+), 44 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9401bf1d..6ae3df77 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,32 +7,8 @@ on: pull_request: jobs: -# Install and cache dependencies - setup: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - # Run unittests test: - needs: setup runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -50,9 +26,10 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: 
steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - if: ${{ matrix.parser == 'native' }} uses: actions-rs/toolchain@v1 with: @@ -64,7 +41,6 @@ jobs: # Run linters lint: - needs: setup runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 @@ -76,16 +52,16 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - run: flake8 - run: ufmt check . - run: python3 -m fixit.cli.run_rules # Run pyre typechecker typecheck: - needs: setup runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 @@ -97,9 +73,10 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Get Python site-packages id: python-info run: | @@ -122,7 +99,6 @@ jobs: # Upload test coverage coverage: - needs: setup runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 @@ -134,9 +110,10 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Generate Coverage run: | coverage run setup.py test @@ -154,7 +131,6 @@ jobs: # Build the docs docs: - needs: setup runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 @@ -166,9 +142,10 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - uses: ts-graphviz/setup-graphviz@v1 - run: sphinx-build docs/source/ docs/build/ - name: Archive Docs @@ -180,7 +157,6 @@ jobs: # Build python package build: name: Build wheels on ${{ matrix.os }}/${{ matrix.vers }} - needs: setup runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -218,9 +194,10 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Disable scmtools local scheme if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- @@ -253,9 +230,10 @@ jobs: with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Validate Dependencies + - name: Install Dependencies if: 
steps.cache.outputs.cache-hit != 'true' - run: exit 1 + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Disable scmtools local scheme run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV @@ -300,7 +278,7 @@ jobs: with: token: ${{ secrets.GITHUB_TOKEN }} args: --manifest-path=native/Cargo.toml --all-features - + rustfmt: name: Rustfmt runs-on: ubuntu-latest From 6434ca8512160058024655557612769ce189db65 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 6 Jan 2022 21:09:27 -0500 Subject: [PATCH 179/632] Remove tox references (#588) --- .pyre_configuration.example | 1 - CONTRIBUTING.md | 6 +- README.rst | 9 ++- libcst/codegen/generate.py | 9 ++- libcst/codegen/tests/test_codegen_clean.py | 14 ++-- requirements-dev.txt | 1 - tox.ini | 74 ---------------------- 7 files changed, 21 insertions(+), 93 deletions(-) delete mode 100644 tox.ini diff --git a/.pyre_configuration.example b/.pyre_configuration.example index 0223ce37..37d88928 100644 --- a/.pyre_configuration.example +++ b/.pyre_configuration.example @@ -6,7 +6,6 @@ "stubs" ], "exclude": [ - ".*/\\.tox/.*", ".*/native/.*" ], "strict": true diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ef6fcded..9af09650 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,7 +3,7 @@ We want to make contributing to this project as easy and transparent as possible. ## Our Development Process -This github repo is the source of truth and all changes need to be reviewed in +This github repo is the source of truth and all changes need to be reviewed in pull requests. ## Pull Requests @@ -12,7 +12,7 @@ We actively welcome your pull requests. 1. Fork the repo and create your branch from `main`. 2. If you've added code that should be tested, add tests. 3. If you've changed APIs, update the documentation. -4. Ensure the test suite passes by `tox test`. +4. Ensure the test suite passes by `python -m unittest`. 5. Make sure your code lints. 6. If you haven't already, complete the Contributor License Agreement ("CLA"). @@ -30,7 +30,7 @@ Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe disclosure of security bugs. In those cases, please go through the process outlined on that page and do not file a public issue. -## Coding Style +## Coding Style We use flake8 and ufmt to enforce coding style. ## License diff --git a/README.rst b/README.rst index 2ddc245d..755bec86 100644 --- a/README.rst +++ b/README.rst @@ -155,14 +155,13 @@ changes to be conformant, run the following in the root: .. code-block:: shell - tox -e autofix + ufmt format && python -m fixit.cli.apply_fix -To run all tests, you'll need to install `tox `_ -and do the following in the root: +To run all tests, you'll need to do the following in the root: .. code-block:: shell - tox -e py37 + python -m unittest You can also run individual tests by using unittest and specifying a module like this: @@ -198,7 +197,7 @@ To generate documents, do the following in the root: .. code-block:: shell - tox -e docs + sphinx-build docs/source/ docs/build/ Future ====== diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index 82e08c43..74a418f5 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -144,11 +144,16 @@ def main(cli_args: List[str]) -> int: parser = argparse.ArgumentParser(description="Generate code for libcst.") parser.add_argument( "system", - metavar="SYSTEM", - help='System to generate code for. 
Valid values include: "visitors", "matchers"', + choices=["all", "visitors", "matchers", "return_types"], + help="System to generate code for.", type=str, ) args = parser.parse_args(cli_args) + if args.system == "all": + codegen_visitors() + codegen_matchers() + codegen_return_types() + return 0 if args.system == "visitors": codegen_visitors() return 0 diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index b26a85cd..7b71ba36 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -24,14 +24,14 @@ class TestCodegenClean(UnitTest): old_code == new_code, f"{module_name} needs new codegen, see " + "`python -m libcst.codegen.generate --help` " - + "for instructions, or run `tox -e codegen`", + + "for instructions, or run `python -m libcst.codegen.generate all`", ) def test_codegen_clean_visitor_functions(self) -> None: """ Verifies that codegen of visitor functions would not result in a - changed file. If this test fails, please run 'tox -e codegen' to - generate new files. + changed file. If this test fails, please run 'python -m libcst.codegen.generate all' + to generate new files. """ new_code = clean_generated_code("\n".join(visitor_codegen.generated_code)) new_file = os.path.join( @@ -62,8 +62,8 @@ class TestCodegenClean(UnitTest): def test_codegen_clean_matcher_classes(self) -> None: """ Verifies that codegen of matcher classes would not result in a - changed file. If this test fails, please run 'tox -e codegen' to - generate new files. + changed file. If this test fails, please run 'python -m libcst.codegen.generate all' + to generate new files. """ new_code = clean_generated_code("\n".join(matcher_codegen.generated_code)) new_file = os.path.join( @@ -94,8 +94,8 @@ class TestCodegenClean(UnitTest): def test_codegen_clean_return_types(self) -> None: """ Verifies that codegen of return types would not result in a - changed file. If this test fails, please run 'tox -e codegen' to - generate new files. + changed file. If this test fails, please run 'python -m libcst.codegen.generate all' + to generate new files. """ new_code = clean_generated_code("\n".join(type_codegen.generated_code)) new_file = os.path.join( diff --git a/requirements-dev.txt b/requirements-dev.txt index 47164281..388c7556 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,6 @@ prompt-toolkit>=2.0.9 pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 -tox>=3.18.1 ufmt==1.3 usort==1.0.0rc1 setuptools-rust>=0.12.1 diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 1c205721..00000000 --- a/tox.ini +++ /dev/null @@ -1,74 +0,0 @@ -[tox] -envlist = py36, py37, py38, py39, lint, docs - -[gh-actions] -python = - 3.6: py36 - 3.7: py37 - 3.8: py38 - 3.9: py39 - -[testenv] -deps = - -rrequirements.txt - -rrequirements-dev.txt -commands = - python -m unittest {posargs} - -[testenv:test] - -[testenv:lint] -commands = - flake8 {posargs} - ufmt check {posargs:.} - python3 -m fixit.cli.run_rules - -[testenv:docs] -commands = - sphinx-build {posargs:docs/source/ docs/build/} - -[testenv:autofix] -commands = - flake8 {posargs} - ufmt format {posargs:.} - python3 -m fixit.cli.apply_fix - -[testenv:pyre] -usedevelop=True -setenv = PYTHONPATH = {toxinidir} -allowlist_externals= - cp -commands = - cp .github/workflows/.pyre_configuration . 
- pyre --version - pyre check - python libcst/tests/test_pyre_integration.py - git diff --exit-code - -[testenv:fuzz36] -basepython = python3.6 -setenv = - HYPOTHESIS = 1 -commands = - python3.6 -m unittest libcst/tests/test_fuzz.py - -[testenv:fuzz37] -basepython = python3.7 -setenv = - HYPOTHESIS = 1 -commands = - python3.7 -m unittest libcst/tests/test_fuzz.py - -[testenv:fuzz38] -basepython = python3.8 -setenv = - HYPOTHESIS = 1 -commands = - python3.8 -m unittest libcst/tests/test_fuzz.py - - -[testenv:codegen] -commands = - python3 -m libcst.codegen.generate visitors - python3 -m libcst.codegen.generate return_types - python3 -m libcst.codegen.generate matchers From 2f75246c3adb1db62ab578fcca11cc3137bd0639 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 7 Jan 2022 01:29:51 -0800 Subject: [PATCH 180/632] Fix variable name in lambda (#590) --- native/libcst/src/nodes/statement.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 7d7da873..319e6f13 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -1598,7 +1598,7 @@ impl<'a> Inflate<'a> for ClassDef<'a> { self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; self.bases = self.bases.inflate(config)?; self.keywords = self.keywords.inflate(config)?; - self.rpar = self.rpar.map(|lpar| lpar.inflate(config)).transpose()?; + self.rpar = self.rpar.map(|rpar| rpar.inflate(config)).transpose()?; // TODO: set whitespace_after_arg for last arg? } From 9f6ff017f204a1dc305c5c4effa96f14038bbfb9 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Fri, 7 Jan 2022 07:34:50 -0500 Subject: [PATCH 181/632] Use pyre site-package feature (#589) * Use pyre site-package feature * Update readme, remove example --- .github/workflows/build.yml | 13 ------------- .gitignore | 1 - ...figuration.template => .pyre_configuration | 2 +- .pyre_configuration.example | 12 ------------ README.rst | 19 +++++++------------ 5 files changed, 8 insertions(+), 39 deletions(-) rename .github/workflows/.pyre_configuration.template => .pyre_configuration (75%) delete mode 100644 .pyre_configuration.example diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6ae3df77..68359560 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -77,19 +77,6 @@ jobs: if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - name: Get Python site-packages - id: python-info - run: | - echo "::set-output name=SITE_PACKAGES::$(python -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')" - - name: Configure Pyre - id: pyre_template - uses: chuhlomin/render-template@v1.2 - with: - template: .github/workflows/.pyre_configuration.template - vars: | - python_site_packages: ${{ steps.python-info.outputs.SITE_PACKAGES }} - - name: Write Pyre Config - run: echo '${{ steps.pyre_template.outputs.result }}' > .pyre_configuration - name: Make sure Pyre uses the working copy run: pip install -e . 
- run: pyre --version diff --git a/.gitignore b/.gitignore index dc2a1b30..9bb9370a 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,5 @@ build/ libcst/_version.py .coverage .hypothesis/ -.pyre_configuration .python-version target/ diff --git a/.github/workflows/.pyre_configuration.template b/.pyre_configuration similarity index 75% rename from .github/workflows/.pyre_configuration.template rename to .pyre_configuration index b2ed4b8d..ae37b031 100644 --- a/.github/workflows/.pyre_configuration.template +++ b/.pyre_configuration @@ -6,7 +6,7 @@ "." ], "search_path": [ - "stubs", "{{ .python_site_packages }}" + "stubs", {"site-package": "setuptools_rust"} ], "workers": 3, "strict": true diff --git a/.pyre_configuration.example b/.pyre_configuration.example deleted file mode 100644 index 37d88928..00000000 --- a/.pyre_configuration.example +++ /dev/null @@ -1,12 +0,0 @@ -{ - "source_directories": [ - "." - ], - "search_path": [ - "stubs" - ], - "exclude": [ - ".*/native/.*" - ], - "strict": true -} diff --git a/README.rst b/README.rst index 755bec86..9ac213c3 100644 --- a/README.rst +++ b/README.rst @@ -175,24 +175,19 @@ for more examples of how to run tests. We use `Pyre `_ for type-checking. -To set up pyre check environment: - -1. Copy the example Pyre config: ``cp .pyre_configuration.example .pyre_configuration``. -2. In the config file, add your venv site-packages dir to "search_path". (e.g. add "/workspace/libcst-env/lib/python3.7/site-packages") - Note: venv dir must **not** be inside the libcst dir -3. Remove installed LibCST and install from the source code: - -.. code-block:: shell - - pip uninstall -y libcst - pip install -e . - To verify types for the library, do the following in the root: .. code-block:: shell pyre check +*Note*: You may need to remove installed LibCST and install from the source code prior to type checking: + +.. code-block:: shell + + pip uninstall -y libcst + pip install -e . + To generate documents, do the following in the root: .. code-block:: shell From 13370227703fe3171e94c57bdd7977f3af696b73 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 7 Jan 2022 12:21:58 -0800 Subject: [PATCH 182/632] [WIP] Support Parenthesized With Statements (#584) On the python side, we can add parentheses from MaybeSentinel.DEFAULT if the whitespace requires it. On the rust side, we support the new grammar but codegen will only add explicitly included parentheses for now - it should be possible to match python behavior but it's not urgent so I've left a TODO --- libcst/_nodes/statement.py | 57 +++++++++++-- libcst/_nodes/tests/test_with.py | 122 ++++++++++++++++++++++----- libcst/_typed_visitor.py | 16 ++++ libcst/matchers/__init__.py | 12 +++ native/libcst/src/nodes/statement.rs | 56 +++++++++--- native/libcst/src/parser/grammar.rs | 18 +++- 6 files changed, 243 insertions(+), 38 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 13519498..9493f57c 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -57,6 +57,7 @@ from libcst._nodes.op import ( from libcst._nodes.whitespace import ( BaseParenthesizableWhitespace, EmptyLine, + ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, ) @@ -2017,24 +2018,47 @@ class With(BaseCompoundStatement): #: Sequence of empty lines appearing before this with statement. 
leading_lines: Sequence[EmptyLine] = () + #: Optional open parenthesis for multi-line with bindings + lpar: Union[LeftParen, MaybeSentinel] = MaybeSentinel.DEFAULT + + #: Optional close parenthesis for multi-line with bindings + rpar: Union[RightParen, MaybeSentinel] = MaybeSentinel.DEFAULT + #: Whitespace after the ``with`` keyword and before the first item. whitespace_after_with: SimpleWhitespace = SimpleWhitespace.field(" ") #: Whitespace after the last item and before the colon. whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + def _validate_parens(self) -> None: + if isinstance(self.lpar, MaybeSentinel) and isinstance(self.rpar, RightParen): + raise CSTValidationError( + "Do not mix concrete LeftParen/RightParen with MaybeSentinel." + ) + if isinstance(self.lpar, LeftParen) and isinstance(self.rpar, MaybeSentinel): + raise CSTValidationError( + "Do not mix concrete LeftParen/RightParen with MaybeSentinel." + ) + def _validate(self) -> None: + self._validate_parens() if len(self.items) == 0: raise CSTValidationError( "A With statement must have at least one WithItem." ) - if self.items[-1].comma != MaybeSentinel.DEFAULT: + if ( + isinstance(self.rpar, MaybeSentinel) + and self.items[-1].comma != MaybeSentinel.DEFAULT + ): raise CSTValidationError( - "The last WithItem in a With cannot have a trailing comma." + "The last WithItem in an unparenthesized With cannot have a trailing comma." ) - if self.whitespace_after_with.empty and not self.items[ - 0 - ].item._safe_to_use_with_word_operator(ExpressionPosition.RIGHT): + if self.whitespace_after_with.empty and not ( + isinstance(self.lpar, LeftParen) + or self.items[0].item._safe_to_use_with_word_operator( + ExpressionPosition.RIGHT + ) + ): raise CSTValidationError("Must have at least one space after with keyword.") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "With": @@ -2048,7 +2072,9 @@ class With(BaseCompoundStatement): whitespace_after_with=visit_required( self, "whitespace_after_with", self.whitespace_after_with, visitor ), + lpar=visit_sentinel(self, "lpar", self.lpar, visitor), items=visit_sequence(self, "items", self.items, visitor), + rpar=visit_sentinel(self, "rpar", self.rpar, visitor), whitespace_before_colon=visit_required( self, "whitespace_before_colon", self.whitespace_before_colon, visitor ), @@ -2060,15 +2086,36 @@ class With(BaseCompoundStatement): ll._codegen(state) state.add_indent_tokens() + needs_paren = False + for item in self.items: + comma = item.comma + if isinstance(comma, Comma): + if isinstance( + comma.whitespace_after, + (EmptyLine, TrailingWhitespace, ParenthesizedWhitespace), + ): + needs_paren = True + break + with state.record_syntactic_position(self, end_node=self.body): asynchronous = self.asynchronous if asynchronous is not None: asynchronous._codegen(state) state.add_token("with") self.whitespace_after_with._codegen(state) + lpar = self.lpar + if isinstance(lpar, LeftParen): + lpar._codegen(state) + elif needs_paren: + state.add_token("(") last_item = len(self.items) - 1 for i, item in enumerate(self.items): item._codegen(state, default_comma=(i != last_item)) + rpar = self.rpar + if isinstance(rpar, RightParen): + rpar._codegen(state) + elif needs_paren: + state.add_token(")") self.whitespace_before_colon._codegen(state) state.add_token(":") self.body._codegen(state) diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index 2246bc2d..1310b3f8 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py 
@@ -7,6 +7,7 @@ from typing import Any import libcst as cst from libcst import parse_statement, PartialParserConfig +from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange @@ -14,6 +15,8 @@ from libcst.testing.utils import data_provider class WithTest(CSTNodeTest): + maxDiff: int = 2000 + @data_provider( ( # Simple with block @@ -138,25 +141,6 @@ class WithTest(CSTNodeTest): "parser": parse_statement, "expected_position": CodeRange((2, 0), (2, 24)), }, - # Weird spacing rules - { - "node": cst.With( - ( - cst.WithItem( - cst.Call( - cst.Name("context_mgr"), - lpar=(cst.LeftParen(),), - rpar=(cst.RightParen(),), - ) - ), - ), - cst.SimpleStatementSuite((cst.Pass(),)), - whitespace_after_with=cst.SimpleWhitespace(""), - ), - "code": "with(context_mgr()): pass\n", - "parser": parse_statement, - "expected_position": CodeRange((1, 0), (1, 25)), - }, # Whitespace { "node": cst.With( @@ -178,6 +162,63 @@ class WithTest(CSTNodeTest): "parser": parse_statement, "expected_position": CodeRange((1, 0), (1, 36)), }, + # Weird spacing rules, that parse differently depending on whether + # we are using a grammar that included parenthesized with statements. + { + "node": cst.With( + ( + cst.WithItem( + cst.Call( + cst.Name("context_mgr"), + lpar=() if is_native() else (cst.LeftParen(),), + rpar=() if is_native() else (cst.RightParen(),), + ) + ), + ), + cst.SimpleStatementSuite((cst.Pass(),)), + lpar=(cst.LeftParen() if is_native() else MaybeSentinel.DEFAULT), + rpar=(cst.RightParen() if is_native() else MaybeSentinel.DEFAULT), + whitespace_after_with=cst.SimpleWhitespace(""), + ), + "code": "with(context_mgr()): pass\n", + "parser": parse_statement, + "expected_position": CodeRange((1, 0), (1, 25)), + }, + # Multi-line parenthesized with. 
+ { + "node": cst.With( + ( + cst.WithItem( + cst.Call(cst.Name("foo")), + comma=cst.Comma( + whitespace_after=cst.ParenthesizedWhitespace( + first_line=cst.TrailingWhitespace( + whitespace=cst.SimpleWhitespace( + value="", + ), + comment=None, + newline=cst.Newline( + value=None, + ), + ), + empty_lines=[], + indent=True, + last_line=cst.SimpleWhitespace( + value=" ", + ), + ) + ), + ), + cst.WithItem(cst.Call(cst.Name("bar")), comma=cst.Comma()), + ), + cst.SimpleStatementSuite((cst.Pass(),)), + lpar=cst.LeftParen(whitespace_after=cst.SimpleWhitespace(" ")), + rpar=cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")), + ), + "code": ("with ( foo(),\n" " bar(), ): pass\n"), # noqa + "parser": parse_statement if is_native() else None, + "expected_position": CodeRange((1, 0), (2, 21)), + }, ) ) def test_valid(self, **kwargs: Any) -> None: @@ -201,7 +242,8 @@ class WithTest(CSTNodeTest): ), cst.IndentedBlock((cst.SimpleStatementLine((cst.Pass(),)),)), ), - "expected_re": "The last WithItem in a With cannot have a trailing comma", + "expected_re": "The last WithItem in an unparenthesized With cannot " + + "have a trailing comma.", }, { "get_node": lambda: cst.With( @@ -211,6 +253,26 @@ class WithTest(CSTNodeTest): ), "expected_re": "Must have at least one space after with keyword", }, + { + "get_node": lambda: cst.With( + (cst.WithItem(cst.Call(cst.Name("context_mgr"))),), + cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_after_with=cst.SimpleWhitespace(""), + lpar=cst.LeftParen(), + ), + "expected_re": "Do not mix concrete LeftParen/RightParen with " + + "MaybeSentinel", + }, + { + "get_node": lambda: cst.With( + (cst.WithItem(cst.Call(cst.Name("context_mgr"))),), + cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_after_with=cst.SimpleWhitespace(""), + rpar=cst.RightParen(), + ), + "expected_re": "Do not mix concrete LeftParen/RightParen with " + + "MaybeSentinel", + }, ) ) def test_invalid(self, **kwargs: Any) -> None: @@ -234,3 +296,23 @@ class WithTest(CSTNodeTest): if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) + + def test_adding_parens(self) -> None: + node = cst.With( + ( + cst.WithItem( + cst.Call(cst.Name("foo")), + comma=cst.Comma( + whitespace_after=cst.ParenthesizedWhitespace(), + ), + ), + cst.WithItem(cst.Call(cst.Name("bar")), comma=cst.Comma()), + ), + cst.SimpleStatementSuite((cst.Pass(),)), + lpar=cst.LeftParen(whitespace_after=cst.SimpleWhitespace(" ")), + rpar=cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")), + ) + module = cst.Module([]) + self.assertEqual( + module.code_for_node(node), ("with ( foo(),\n" "bar(), ): pass\n") # noqa + ) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index cea085d0..a880bee4 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -5379,6 +5379,22 @@ class CSTTypedBaseFunctions: def leave_With_leading_lines(self, node: "With") -> None: pass + @mark_no_op + def visit_With_lpar(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_lpar(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_rpar(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_rpar(self, node: "With") -> None: + pass + @mark_no_op def visit_With_whitespace_after_with(self, node: "With") -> None: pass diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 655bc947..9602de41 100644 --- a/libcst/matchers/__init__.py +++ 
b/libcst/matchers/__init__.py @@ -15411,6 +15411,18 @@ class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): ] ], ] = DoNotCare() + lpar: Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] = DoNotCare() + rpar: Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] = DoNotCare() whitespace_after_with: Union[ SimpleWhitespaceMatchType, DoNotCareSentinel, diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 319e6f13..21725343 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -1927,6 +1927,19 @@ pub struct WithItem<'a> { pub comma: Option>, } +impl<'a> WithItem<'a> { + fn inflate_withitem(mut self, config: &Config<'a>, is_last: bool) -> Result { + self.item = self.item.inflate(config)?; + self.asname = self.asname.inflate(config)?; + self.comma = if is_last { + self.comma.map(|c| c.inflate_before(config)).transpose()? + } else { + self.comma.map(|c| c.inflate(config)).transpose()? + }; + Ok(self) + } +} + impl<'a> Codegen<'a> for WithItem<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { self.item.codegen(state); @@ -1948,21 +1961,14 @@ impl<'a> WithComma<'a> for WithItem<'a> { } } -impl<'a> Inflate<'a> for WithItem<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.item = self.item.inflate(config)?; - self.asname = self.asname.inflate(config)?; - self.comma = self.comma.inflate(config)?; - Ok(self) - } -} - #[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct With<'a> { pub items: Vec>, pub body: Suite<'a>, pub asynchronous: Option>, pub leading_lines: Vec>, + pub lpar: Option>, + pub rpar: Option>, pub whitespace_after_with: SimpleWhitespace<'a>, pub whitespace_before_colon: SimpleWhitespace<'a>, @@ -1983,6 +1989,18 @@ impl<'a> Codegen<'a> for With<'a> { } state.add_token("with"); self.whitespace_after_with.codegen(state); + + // TODO: Force parens whenever there are newlines in + // the commas of self.items. + // + // For now, only the python API does this. 
+ let need_parens = false; + if let Some(lpar) = &self.lpar { + lpar.codegen(state); + } else if need_parens { + state.add_token("("); + } + let len = self.items.len(); for (i, item) in self.items.iter().enumerate() { item.codegen(state); @@ -1990,6 +2008,13 @@ impl<'a> Codegen<'a> for With<'a> { state.add_token(", "); } } + + if let Some(rpar) = &self.rpar { + rpar.codegen(state); + } else if need_parens { + state.add_token(")"); + } + self.whitespace_before_colon.codegen(state); state.add_token(":"); self.body.codegen(state); @@ -2027,7 +2052,18 @@ impl<'a> Inflate<'a> for With<'a> { self.whitespace_after_with = parse_simple_whitespace(config, &mut (*self.with_tok).whitespace_after.borrow_mut())?; - self.items = self.items.inflate(config)?; + self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; + let len = self.items.len(); + self.items = self + .items + .into_iter() + .enumerate() + .map(|(idx, el)| el.inflate_withitem(config, idx + 1 == len)) + .collect::>>()?; + if !self.items.is_empty() { + // rpar only has whitespace if items is non empty + self.rpar = self.rpar.map(|rpar| rpar.inflate(config)).transpose()?; + } self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index c881be57..70d2f968 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -473,13 +473,21 @@ parser! { // With statement rule with_stmt() -> With<'a> - = kw:lit("with") items:separated(, ) + = kw:lit("with") l:lpar() items:separated_trailer(, ) r:rpar() col:lit(":") b:block() { - make_with(None, kw, comma_separate(items.0, items.1, None), col, b) + make_with(None, kw, Some(l), comma_separate(items.0, items.1, items.2), Some(r), col, b) + } + / kw:lit("with") items:separated(, ) + col:lit(":") b:block() { + make_with(None, kw, None, comma_separate(items.0, items.1, None), None, col, b) + } + / asy:tok(Async, "ASYNC") kw:lit("with") l:lpar() items:separated_trailer(, ) r:rpar() + col:lit(":") b:block() { + make_with(Some(asy), kw, Some(l), comma_separate(items.0, items.1, items.2), Some(r), col, b) } / asy:tok(Async, "ASYNC") kw:lit("with") items:separated(, ) col:lit(":") b:block() { - make_with(Some(asy), kw, comma_separate(items.0, items.1, None), col, b) + make_with(Some(asy), kw, None, comma_separate(items.0, items.1, None), None, col, b) } rule with_item() -> WithItem<'a> @@ -3218,7 +3226,9 @@ fn make_with_item<'a>( fn make_with<'a>( async_tok: Option>, with_tok: TokenRef<'a>, + lpar: Option>, items: Vec>, + rpar: Option>, colon_tok: TokenRef<'a>, body: Suite<'a>, ) -> With<'a> { @@ -3230,6 +3240,8 @@ fn make_with<'a>( body, asynchronous, leading_lines: Default::default(), + lpar, + rpar, whitespace_after_with: Default::default(), whitespace_before_colon: Default::default(), async_tok, From 8652974d8798ad08c73846c7bf8ebbd14dc02b02 Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Sat, 8 Jan 2022 02:18:10 -0800 Subject: [PATCH 183/632] Support relative imports in AddImportsVisitor. (#585) * Support relative imports in AddImportsVisitor. * Adds an Import dataclass to represent a single imported object * Refactors AddImportsVisitor to pass around Import objects * Separates out the main logic in get_absolute_module_for_import so that it can be used to resolve relative module names outside of a cst.Import node * Resolves relative module names in AddImportsVisitor if we have a current module name set. 
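A minimal usage sketch of the relative-import support described above, assuming the updated `add_needed_import` signature shown in the diff below; the module names are illustrative only:

```python
from libcst.codemod import CodemodContext
from libcst.codemod.visitors import AddImportsVisitor

# Codemod context for a module named a.b.foobar (illustrative name).
context = CodemodContext(full_module_name="a.b.foobar")

# Schedule `from ..c import D`; relative=2 stands for the two leading dots.
# Because full_module_name is set, the relative target is resolved against
# a.b.foobar before the import is added.
AddImportsVisitor.add_needed_import(context, "c", "D", relative=2)
```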
Fixes #578 --- libcst/codemod/visitors/__init__.py | 2 + libcst/codemod/visitors/_add_imports.py | 51 +-- .../visitors/_apply_type_annotations.py | 3 +- libcst/codemod/visitors/_imports.py | 43 +++ .../visitors/tests/test_add_imports.py | 313 +++++++++++++++--- libcst/helpers/__init__.py | 2 + libcst/helpers/_statement.py | 20 +- 7 files changed, 358 insertions(+), 76 deletions(-) create mode 100644 libcst/codemod/visitors/_imports.py diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index a1416505..1cbbd2c8 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -12,6 +12,7 @@ from libcst.codemod.visitors._gather_string_annotation_names import ( GatherNamesFromStringAnnotationsVisitor, ) from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor +from libcst.codemod.visitors._imports import ImportItem from libcst.codemod.visitors._remove_imports import RemoveImportsVisitor __all__ = [ @@ -22,5 +23,6 @@ __all__ = [ "GatherImportsVisitor", "GatherNamesFromStringAnnotationsVisitor", "GatherUnusedImportsVisitor", + "ImportItem", "RemoveImportsVisitor", ] diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 248d3838..64131dd6 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -2,7 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -# + from collections import defaultdict from typing import Dict, List, Optional, Sequence, Set, Tuple, Union @@ -11,6 +11,7 @@ from libcst import matchers as m, parse_statement from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._gather_imports import GatherImportsVisitor +from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_absolute_module_for_import @@ -63,7 +64,7 @@ class AddImportsVisitor(ContextAwareTransformer): @staticmethod def _get_imports_from_context( context: CodemodContext, - ) -> List[Tuple[str, Optional[str], Optional[str]]]: + ) -> List[ImportItem]: imports = context.scratch.get(AddImportsVisitor.CONTEXT_KEY, []) if not isinstance(imports, list): raise Exception("Logic error!") @@ -75,6 +76,7 @@ class AddImportsVisitor(ContextAwareTransformer): module: str, obj: Optional[str] = None, asname: Optional[str] = None, + relative: int = 0, ) -> None: """ Schedule an import to be added in a future invocation of this class by @@ -96,64 +98,73 @@ class AddImportsVisitor(ContextAwareTransformer): if module == "__future__" and obj is None: raise Exception("Cannot import __future__ directly!") imports = AddImportsVisitor._get_imports_from_context(context) - imports.append((module, obj, asname)) + imports.append(ImportItem(module, obj, asname, relative)) context.scratch[AddImportsVisitor.CONTEXT_KEY] = imports def __init__( self, context: CodemodContext, - imports: Sequence[Tuple[str, Optional[str], Optional[str]]] = (), + imports: Sequence[ImportItem] = (), ) -> None: # Allow for instantiation from either a context (used when multiple transforms # get chained) or from a direct instantiation. 
super().__init__(context) - imps: List[Tuple[str, Optional[str], Optional[str]]] = [ + imps: List[ImportItem] = [ *AddImportsVisitor._get_imports_from_context(context), *imports, ] # Verify that the imports are valid - for module, obj, alias in imps: - if module == "__future__" and obj is None: + for imp in imps: + if imp.module == "__future__" and imp.obj_name is None: raise Exception("Cannot import __future__ directly!") - if module == "__future__" and alias is not None: + if imp.module == "__future__" and imp.alias is not None: raise Exception("Cannot import __future__ objects with aliases!") + # Resolve relative imports if we have a module name + imps = [imp.resolve_relative(self.context.full_module_name) for imp in imps] + # List of modules we need to ensure are imported self.module_imports: Set[str] = { - module for (module, obj, alias) in imps if obj is None and alias is None + imp.module for imp in imps if imp.obj_name is None and imp.alias is None } # List of modules we need to check for object imports on from_imports: Set[str] = { - module for (module, obj, alias) in imps if obj is not None and alias is None + imp.module for imp in imps if imp.obj_name is not None and imp.alias is None } # Mapping of modules we're adding to the object they should import self.module_mapping: Dict[str, Set[str]] = { module: { - o for (m, o, n) in imps if m == module and o is not None and n is None + imp.obj_name + for imp in imps + if imp.module == module + and imp.obj_name is not None + and imp.alias is None } for module in sorted(from_imports) } # List of aliased modules we need to ensure are imported self.module_aliases: Dict[str, str] = { - module: alias - for (module, obj, alias) in imps - if obj is None and alias is not None + imp.module: imp.alias + for imp in imps + if imp.obj_name is None and imp.alias is not None } # List of modules we need to check for object imports on from_imports_aliases: Set[str] = { - module - for (module, obj, alias) in imps - if obj is not None and alias is not None + imp.module + for imp in imps + if imp.obj_name is not None and imp.alias is not None } # Mapping of modules we're adding to the object with alias they should import self.alias_mapping: Dict[str, List[Tuple[str, str]]] = { module: [ - (o, n) - for (m, o, n) in imps - if m == module and o is not None and n is not None + (imp.obj_name, imp.alias) + for imp in imps + if imp.module == module + and imp.obj_name is not None + and imp.alias is not None ] for module in sorted(from_imports_aliases) } diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index d29b6c9f..8a4fccfe 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -12,6 +12,7 @@ from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor +from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_full_name_for_node from libcst.metadata import PositionProvider, QualifiedNameProvider @@ -416,7 +417,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): tree_with_imports = AddImportsVisitor( context=self.context, imports=( - [("__future__", "annotations", None)] + [ImportItem("__future__", "annotations", None)] if self.use_future_annotations else () ), diff --git 
a/libcst/codemod/visitors/_imports.py b/libcst/codemod/visitors/_imports.py new file mode 100644 index 00000000..5a703112 --- /dev/null +++ b/libcst/codemod/visitors/_imports.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from dataclasses import dataclass, replace +from typing import Optional + +from libcst.helpers import get_absolute_module + + +@dataclass(frozen=True) +class ImportItem: + """Representation of individual import items for codemods.""" + + module_name: str + obj_name: Optional[str] = None + alias: Optional[str] = None + relative: int = 0 + + def __post_init__(self) -> None: + if self.module_name is None: + object.__setattr__(self, "module_name", "") + elif self.module_name.startswith("."): + mod = self.module_name.lstrip(".") + rel = self.relative + len(self.module_name) - len(mod) + object.__setattr__(self, "module_name", mod) + object.__setattr__(self, "relative", rel) + + @property + def module(self) -> str: + return "." * self.relative + self.module_name + + def resolve_relative(self, base_module: Optional[str]) -> "ImportItem": + """Return an ImportItem with an absolute module name if possible.""" + mod = self + # `import ..a` -> `from .. import a` + if mod.relative and mod.obj_name is None: + mod = replace(mod, module_name="", obj_name=mod.module_name) + if base_module is None: + return mod + m = get_absolute_module(base_module, mod.module_name or None, self.relative) + return mod if m is None else replace(mod, module_name=m, relative=0) diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 4e410a14..6a88b335 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. 
# from libcst.codemod import CodemodContext, CodemodTest -from libcst.codemod.visitors import AddImportsVisitor +from libcst.codemod.visitors import AddImportsVisitor, ImportItem class TestAddImportsCodemod(CodemodTest): @@ -55,7 +55,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", None, None)]) + self.assertCodemod(before, after, [ImportItem("a.b.c", None, None)]) def test_dont_add_module_simple(self) -> None: """ @@ -81,7 +81,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", None, None)]) + self.assertCodemod(before, after, [ImportItem("a.b.c", None, None)]) def test_add_module_alias_simple(self) -> None: """ @@ -105,7 +105,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", None, "d")]) + self.assertCodemod(before, after, [ImportItem("a.b.c", None, "d")]) def test_dont_add_module_alias_simple(self) -> None: """ @@ -131,7 +131,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", None, "d")]) + self.assertCodemod(before, after, [ImportItem("a.b.c", None, "d")]) def test_add_module_complex(self) -> None: """ @@ -167,11 +167,11 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ - ("a.b.c", None, None), - ("defg.hi", None, None), - ("argparse", None, None), - ("jkl", None, "h"), - ("i.j", None, "k"), + ImportItem("a.b.c", None, None), + ImportItem("defg.hi", None, None), + ImportItem("argparse", None, None), + ImportItem("jkl", None, "h"), + ImportItem("i.j", None, "k"), ], ) @@ -197,7 +197,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", None)]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)]) def test_add_object_alias_simple(self) -> None: """ @@ -221,7 +221,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", "E")]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", "E")]) def test_add_future(self) -> None: """ @@ -250,7 +250,9 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("__future__", "dummy_feature", None)]) + self.assertCodemod( + before, after, [ImportItem("__future__", "dummy_feature", None)] + ) def test_dont_add_object_simple(self) -> None: """ @@ -276,7 +278,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", None)]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)]) def test_dont_add_object_alias_simple(self) -> None: """ @@ -302,7 +304,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", "E")]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", "E")]) def test_add_object_modify_simple(self) -> None: """ @@ -328,7 +330,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", None)]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)]) def test_add_object_alias_modify_simple(self) -> None: """ @@ -354,7 +356,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", "_")]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", "_")]) def test_add_object_modify_complex(self) -> None: """ @@ -387,17 +389,17 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ - 
("a.b.c", "D", None), - ("a.b.c", "F", None), - ("a.b.c", "G", "H"), - ("d.e.f", "Foo", None), - ("g.h.i", "Z", None), - ("g.h.i", "X", None), - ("d.e.f", "Bar", None), - ("d.e.f", "Baz", "Qux"), - ("g.h.i", "Y", None), - ("g.h.i", "V", "W"), - ("a.b.c", "F", None), + ImportItem("a.b.c", "D", None), + ImportItem("a.b.c", "F", None), + ImportItem("a.b.c", "G", "H"), + ImportItem("d.e.f", "Foo", None), + ImportItem("g.h.i", "Z", None), + ImportItem("g.h.i", "X", None), + ImportItem("d.e.f", "Bar", None), + ImportItem("d.e.f", "Baz", "Qux"), + ImportItem("g.h.i", "Y", None), + ImportItem("g.h.i", "V", "W"), + ImportItem("a.b.c", "F", None), ], ) @@ -440,18 +442,18 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ - ("a.b.c", "D", None), - ("a.b.c", "F", None), - ("d.e.f", "Foo", None), - ("sys", None, None), - ("g.h.i", "Z", None), - ("g.h.i", "X", None), - ("d.e.f", "Bar", None), - ("g.h.i", "Y", None), - ("foo", None, None), - ("a.b.c", "F", None), - ("bar", None, "baz"), - ("qux", None, "quux"), + ImportItem("a.b.c", "D", None), + ImportItem("a.b.c", "F", None), + ImportItem("d.e.f", "Foo", None), + ImportItem("sys", None, None), + ImportItem("g.h.i", "Z", None), + ImportItem("g.h.i", "X", None), + ImportItem("d.e.f", "Bar", None), + ImportItem("g.h.i", "Y", None), + ImportItem("foo", None, None), + ImportItem("a.b.c", "F", None), + ImportItem("bar", None, "baz"), + ImportItem("qux", None, "quux"), ], ) @@ -481,7 +483,7 @@ class TestAddImportsCodemod(CodemodTest): return 5 """ - self.assertCodemod(before, after, [("a.b.c", "D", None)]) + self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)]) def test_add_import_preserve_doctring_multiples(self) -> None: """ @@ -511,7 +513,9 @@ class TestAddImportsCodemod(CodemodTest): """ self.assertCodemod( - before, after, [("a.b.c", "D", None), ("argparse", None, None)] + before, + after, + [ImportItem("a.b.c", "D", None), ImportItem("argparse", None, None)], ) def test_strict_module_no_imports(self) -> None: @@ -532,7 +536,7 @@ class TestAddImportsCodemod(CodemodTest): pass """ - self.assertCodemod(before, after, [("argparse", None, None)]) + self.assertCodemod(before, after, [ImportItem("argparse", None, None)]) def test_strict_module_with_imports(self) -> None: """ @@ -556,7 +560,7 @@ class TestAddImportsCodemod(CodemodTest): pass """ - self.assertCodemod(before, after, [("argparse", None, None)]) + self.assertCodemod(before, after, [ImportItem("argparse", None, None)]) def test_dont_add_relative_object_simple(self) -> None: """ @@ -585,7 +589,7 @@ class TestAddImportsCodemod(CodemodTest): self.assertCodemod( before, after, - [("a.b.c", "D", None)], + [ImportItem("a.b.c", "D", None)], context_override=CodemodContext(full_module_name="a.b.foobar"), ) @@ -616,7 +620,7 @@ class TestAddImportsCodemod(CodemodTest): self.assertCodemod( before, after, - [("a.b.c", "D", None)], + [ImportItem("a.b.c", "D", None)], context_override=CodemodContext(full_module_name="a.b.foobar"), ) @@ -634,7 +638,220 @@ class TestAddImportsCodemod(CodemodTest): self.assertCodemod( before, after, - [("a", "f", None), ("a", "g", "y"), ("a", "c", None), ("a", "d", "x")], + [ + ImportItem("a", "f", None), + ImportItem("a", "g", "y"), + ImportItem("a", "c", None), + ImportItem("a", "d", "x"), + ], + context_override=CodemodContext(full_module_name="a.b.foobar"), + ) + + def test_add_explicit_relative(self) -> None: + """ + Should add a relative import from .. . 
+ """ + + before = """ + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from .. import a + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("a", None, None, 2)], + ) + + def test_add_explicit_relative_alias(self) -> None: + """ + Should add a relative import from .. . + """ + + before = """ + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from .. import a as foo + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("a", None, "foo", 2)], + ) + + def test_add_explicit_relative_object_simple(self) -> None: + """ + Should add a relative import. + """ + + before = """ + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from ..a import B + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("a", "B", None, 2)], + ) + + def test_dont_add_explicit_relative_object_simple(self) -> None: + """ + Should not add object as an import since it exists. + """ + + before = """ + from ..c import D + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from ..c import D + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("c", "D", None, 2)], + context_override=CodemodContext(full_module_name="a.b.foobar"), + ) + + def test_add_object_explicit_relative_modify_simple(self) -> None: + """ + Should modify existing import to add new object. + """ + + before = """ + from ..c import E, F + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from ..c import D, E, F + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("c", "D", None, 2)], + context_override=CodemodContext(full_module_name="a.b.foobar"), + ) + + def test_add_object_resolve_explicit_relative_modify_simple(self) -> None: + """ + Should merge a relative new module with an absolute existing one. + """ + + before = """ + from ..c import E, F + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from ..c import D, E, F + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("c", "D", None, 2)], + context_override=CodemodContext(full_module_name="a.b.foobar"), + ) + + def test_add_object_resolve_dotted_relative_modify_simple(self) -> None: + """ + Should merge a relative new module with an absolute existing one. 
+ """ + + before = """ + from ..c import E, F + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + after = """ + from ..c import D, E, F + + def foo() -> None: + pass + + def bar() -> int: + return 5 + """ + + self.assertCodemod( + before, + after, + [ImportItem("..c", "D", None)], context_override=CodemodContext(full_module_name="a.b.foobar"), ) @@ -655,6 +872,6 @@ class TestAddImportsCodemod(CodemodTest): self.assertCodemod( before, after, - [("__future__", "annotations", None)], + [ImportItem("__future__", "annotations", None)], context_override=CodemodContext(full_module_name="a.b.foobar"), ) diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index 3e23a6d9..ccd12c72 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -5,6 +5,7 @@ # from libcst.helpers._statement import ( + get_absolute_module, get_absolute_module_for_import, get_absolute_module_for_import_or_raise, ) @@ -21,6 +22,7 @@ from libcst.helpers.expression import ( from libcst.helpers.module import insert_header_comments __all__ = [ + "get_absolute_module", "get_absolute_module_for_import", "get_absolute_module_for_import_or_raise", "get_full_name_for_node", diff --git a/libcst/helpers/_statement.py b/libcst/helpers/_statement.py index 0d21e225..f62a5eb8 100644 --- a/libcst/helpers/_statement.py +++ b/libcst/helpers/_statement.py @@ -9,14 +9,9 @@ import libcst as cst from libcst.helpers.expression import get_full_name_for_node -def get_absolute_module_for_import( - current_module: Optional[str], import_node: cst.ImportFrom +def get_absolute_module( + current_module: Optional[str], module_name: Optional[str], num_dots: int ) -> Optional[str]: - # First, let's try to grab the module name, regardless of relative status. - module = import_node.module - module_name = get_full_name_for_node(module) if module is not None else None - # Now, get the relative import location if it exists. - num_dots = len(import_node.relative) if num_dots == 0: # This is an absolute import, so the module is correct. return module_name @@ -43,6 +38,17 @@ def get_absolute_module_for_import( return base_module if len(base_module) > 0 else None +def get_absolute_module_for_import( + current_module: Optional[str], import_node: cst.ImportFrom +) -> Optional[str]: + # First, let's try to grab the module name, regardless of relative status. + module = import_node.module + module_name = get_full_name_for_node(module) if module is not None else None + # Now, get the relative import location if it exists. + num_dots = len(import_node.relative) + return get_absolute_module(current_module, module_name, num_dots) + + def get_absolute_module_for_import_or_raise( current_module: Optional[str], import_node: cst.ImportFrom ) -> str: From 1937fbf47d83ae788bc12c6da07c79b2195fdaab Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Sun, 9 Jan 2022 08:32:13 -0800 Subject: [PATCH 184/632] Add a `Building` subsection to the `Developing` docs (#593) * Add a build section to the docs * Tweaks based on code review --- README.rst | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index 9ac213c3..9f374c4d 100644 --- a/README.rst +++ b/README.rst @@ -172,6 +172,33 @@ this: See the `unittest documentation `_ for more examples of how to run tests. + +Building +~~~~~~~~ + +In order to build LibCST, which includes a native parser module, you +will need to have the Rust build tool ``cargo`` on your path. 
You can +usually install ``cargo`` using your system package manager, but the +most popular way to install cargo is using +`rustup `_. + +To build just the native parser, do the following from the ``native`` +directory: + +.. code-block:: shell + + cargo build + +To build the ``libcst.native`` module and install ``libcst``, run this +from the root: + +.. code-block:: shell + + pip uninstall -y libcst + pip install -e . + +Type Checking +~~~~~~~~~~~~~ We use `Pyre `_ for type-checking. @@ -181,12 +208,11 @@ To verify types for the library, do the following in the root: pyre check -*Note*: You may need to remove installed LibCST and install from the source code prior to type checking: +*Note:* You may need to run the ``pip install -e .`` command prior +to type checking, see the section above on building. -.. code-block:: shell - - pip uninstall -y libcst - pip install -e . +Generating Documents +~~~~~~~~~~~~~~~~~~~~ To generate documents, do the following in the root: From 8a7c13ff365a284f7bb7ff0bcd280cd6a5314436 Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Mon, 10 Jan 2022 12:41:21 -0800 Subject: [PATCH 185/632] Use precise signature matching when inserting function type annotations (#591) * Use precise signature matching when inserting function type annotations * add type annotations * Add an argument for strict annotation matching. * don't use Any --- .../visitors/_apply_type_annotations.py | 149 ++++++++++-- .../tests/test_apply_type_annotations.py | 215 ++++++++++++++++++ 2 files changed, 351 insertions(+), 13 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 8a4fccfe..cb272bb4 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -19,6 +19,13 @@ from libcst.metadata import PositionProvider, QualifiedNameProvider NameOrAttribute = Union[cst.Name, cst.Attribute] NAME_OR_ATTRIBUTE = (cst.Name, cst.Attribute) +# Union type for *args and **args +StarParamType = Union[ + None, + cst._maybe_sentinel.MaybeSentinel, + cst._nodes.expression.Param, + cst._nodes.expression.ParamStar, +] def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[str]: @@ -44,6 +51,29 @@ def _get_import_names(imports: Sequence[Union[cst.Import, cst.ImportFrom]]) -> S return import_names +def _is_set(x: Union[None, cst.CSTNode, cst.MaybeSentinel]) -> bool: + return x is not None and x != cst.MaybeSentinel.DEFAULT + + +@dataclass(frozen=True) +class FunctionKey: + name: str + pos: int + kwonly: str + posonly: int + star_arg: bool + star_kwarg: bool + + @classmethod + def make(cls, name: str, params: cst.Parameters) -> "FunctionKey": + pos = len(params.params) + kwonly = ",".join(sorted(x.name.value for x in params.kwonly_params)) + posonly = len(params.posonly_params) + star_arg = _is_set(params.star_arg) + star_kwarg = _is_set(params.star_kwarg) + return cls(name, pos, kwonly, posonly, star_arg, star_kwarg) + + @dataclass(frozen=True) class FunctionAnnotation: parameters: cst.Parameters @@ -64,7 +94,7 @@ class TypeCollector(cst.CSTVisitor): # Qualifier for storing the canonical name of the current function. self.qualifier: List[str] = [] # Store the annotations. 
- self.function_annotations: Dict[str, FunctionAnnotation] = {} + self.function_annotations: Dict[FunctionKey, FunctionAnnotation] = {} self.attribute_annotations: Dict[str, cst.Annotation] = {} self.existing_imports: Set[str] = existing_imports self.class_definitions: Dict[str, cst.ClassDef] = {} @@ -100,7 +130,9 @@ class TypeCollector(cst.CSTVisitor): self._handle_Annotation(annotation=returns) if returns is not None else None ) parameter_annotations = self._handle_Parameters(node.params) - self.function_annotations[".".join(self.qualifier)] = FunctionAnnotation( + name = ".".join(self.qualifier) + key = FunctionKey.make(name, node.params) + self.function_annotations[key] = FunctionAnnotation( parameters=parameter_annotations, returns=return_annotation ) @@ -272,7 +304,9 @@ class TypeCollector(cst.CSTVisitor): @dataclass(frozen=True) class Annotations: - function_annotations: Dict[str, FunctionAnnotation] = field(default_factory=dict) + function_annotations: Dict[FunctionKey, FunctionAnnotation] = field( + default_factory=dict + ) attribute_annotations: Dict[str, cst.Annotation] = field(default_factory=dict) class_definitions: Dict[str, cst.ClassDef] = field(default_factory=dict) @@ -338,6 +372,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): annotations: Optional[Annotations] = None, overwrite_existing_annotations: bool = False, use_future_annotations: bool = False, + strict_posargs_matching: bool = True, + strict_annotation_matching: bool = False, ) -> None: super().__init__(context) # Qualifier for storing the canonical name of the current function. @@ -349,6 +385,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.visited_classes: Set[str] = set() self.overwrite_existing_annotations = overwrite_existing_annotations self.use_future_annotations = use_future_annotations + self.strict_posargs_matching = strict_posargs_matching + self.strict_annotation_matching = strict_annotation_matching # We use this to determine the end of the import block so that we can # insert top-level annotations. 
@@ -365,6 +403,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): stub: cst.Module, overwrite_existing_annotations: bool = False, use_future_annotations: bool = False, + strict_posargs_matching: bool = True, + strict_annotation_matching: bool = False, ) -> None: """ Store a stub module in the :class:`~libcst.codemod.CodemodContext` so @@ -381,6 +421,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): stub, overwrite_existing_annotations, use_future_annotations, + strict_posargs_matching, + strict_annotation_matching, ) def transform_module_impl(self, tree: cst.Module) -> cst.Module: @@ -401,6 +443,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): stub, overwrite_existing_annotations, use_future_annotations, + strict_posargs_matching, + strict_annotation_matching, ) = context_contents self.overwrite_existing_annotations = ( self.overwrite_existing_annotations or overwrite_existing_annotations @@ -408,6 +452,12 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.use_future_annotations = ( self.use_future_annotations or use_future_annotations ) + self.strict_posargs_matching = ( + self.strict_posargs_matching and strict_posargs_matching + ) + self.strict_annotation_matching = ( + self.strict_annotation_matching or strict_annotation_matching + ) visitor = TypeCollector(existing_import_names, self.context) cst.MetadataWrapper(stub).visit(visitor) self.annotations.function_annotations.update(visitor.function_annotations) @@ -538,15 +588,21 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # Don't override existing annotations or default values unless asked # to overwrite existing annotations. def update_annotation( - parameters: Sequence[cst.Param], annotations: Sequence[cst.Param] + parameters: Sequence[cst.Param], + annotations: Sequence[cst.Param], + positional: bool, ) -> List[cst.Param]: parameter_annotations = {} annotated_parameters = [] - for parameter in annotations: + positional = positional and not self.strict_posargs_matching + for i, parameter in enumerate(annotations): + key = i if positional else parameter.name.value if parameter.annotation: - parameter_annotations[parameter.name.value] = parameter.annotation - for parameter in parameters: - key = parameter.name.value + parameter_annotations[key] = parameter.annotation.with_changes( + whitespace_before_indicator=cst.SimpleWhitespace(value="") + ) + for i, parameter in enumerate(parameters): + key = i if positional else parameter.name.value if key in parameter_annotations and ( self.overwrite_existing_annotations or not parameter.annotation ): @@ -559,14 +615,19 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return annotations.parameters.with_changes( params=update_annotation( - updated_node.params.params, annotations.parameters.params + updated_node.params.params, + annotations.parameters.params, + positional=True, ), kwonly_params=update_annotation( - updated_node.params.kwonly_params, annotations.parameters.kwonly_params + updated_node.params.kwonly_params, + annotations.parameters.kwonly_params, + positional=False, ), posonly_params=update_annotation( updated_node.params.posonly_params, annotations.parameters.posonly_params, + positional=True, ), ) @@ -594,6 +655,64 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): *statements[1:], ] + def _match_signatures( # noqa: C901: Too complex + self, + function: cst.FunctionDef, + annotations: FunctionAnnotation, + ) -> bool: + """Check that function annotations on both signatures are 
compatible.""" + + def compatible( + p: Optional[cst.Annotation], q: Optional[cst.Annotation] + ) -> bool: + if self.overwrite_existing_annotations or not _is_set(p) or not _is_set(q): + return True + if not self.strict_annotation_matching: + # We will not overwrite clashing annotations, but the signature as a + # whole will be marked compatible so that holes can be filled in. + return True + return p.annotation.deep_equals(q.annotation) # pyre-ignore[16] + + def match_posargs(ps: Sequence[cst.Param], qs: Sequence[cst.Param]) -> bool: + if len(ps) != len(qs): + return False + for p, q in zip(ps, qs): + if self.strict_posargs_matching and not p.name.value == q.name.value: + return False + if not compatible(p.annotation, q.annotation): + return False + return True + + def match_kwargs(ps: Sequence[cst.Param], qs: Sequence[cst.Param]) -> bool: + ps_dict = {x.name.value: x for x in ps} + qs_dict = {x.name.value: x for x in qs} + if set(ps_dict.keys()) != set(qs_dict.keys()): + return False + for k in ps_dict.keys(): + if not compatible(ps_dict[k].annotation, qs_dict[k].annotation): + return False + return True + + def match_star(p: StarParamType, q: StarParamType) -> bool: + return _is_set(p) == _is_set(q) + + def match_params(f: cst.FunctionDef, g: FunctionAnnotation) -> bool: + p, q = f.params, g.parameters + return ( + match_posargs(p.params, q.params) + and match_posargs(p.posonly_params, q.posonly_params) + and match_kwargs(p.kwonly_params, q.kwonly_params) + and match_star(p.star_arg, q.star_arg) + and match_star(p.star_kwarg, q.star_kwarg) + ) + + def match_return(f: cst.FunctionDef, g: FunctionAnnotation) -> bool: + return compatible(f.returns, g.returns) + + return match_params(function, annotations) and match_return( + function, annotations + ) + # transform API methods def visit_ClassDef(self, node: cst.ClassDef) -> None: @@ -614,12 +733,16 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): def leave_FunctionDef( self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef ) -> cst.FunctionDef: - key = self._qualifier_name() + key = FunctionKey.make(self._qualifier_name(), updated_node.params) self.qualifier.pop() if key in self.annotations.function_annotations: function_annotation = self.annotations.function_annotations[key] - # Only add new annotation if explicitly told to overwrite existing - # annotations or if one doesn't already exist. + # Only add new annotation if: + # * we have matching function signatures and + # * we are explicitly told to overwrite existing annotations or + # * there is no existing annotation + if not self._match_signatures(updated_node, function_annotation): + return updated_node set_return_annotation = ( self.overwrite_existing_annotations or updated_node.returns is None ) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index c5fb7f9e..150e996a 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -994,6 +994,221 @@ class TestApplyAnnotationsVisitor(CodemodTest): use_future_annotations=True, ) + @data_provider( + { + "mismatched_signature_posargs": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(a): + return 'hello' + """, + """ + def f(a): + return 'hello' + """, + ), + "mismatched_signature_annotation": ( + """ + def f(a: bool, b: bool) -> str: ... 
+ """, + """ + def f(a, b: int): + return 'hello' + """, + """ + def f(a: bool, b: int) -> str: + return 'hello' + """, + ), + "mismatched_posarg_names": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(x, y): + return 'hello' + """, + """ + def f(x, y): + return 'hello' + """, + ), + "mismatched_return_type": ( + """ + def f(a: bool, b: bool) -> int: ... + """, + """ + def f(a, b) -> str: + return 'hello' + """, + """ + def f(a: bool, b: bool) -> str: + return 'hello' + """, + ), + "matched_signature": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(a: bool, b = False): + return 'hello' + """, + """ + def f(a: bool, b: bool = False) -> str: + return 'hello' + """, + ), + "matched_signature_with_permuted_kwargs": ( + """ + def f(*, a: bool, b: bool) -> str: ... + """, + """ + def f(*, b: bool, a = False): + return 'hello' + """, + """ + def f(*, b: bool, a: bool = False) -> str: + return 'hello' + """, + ), + } + ) + def test_signature_matching(self, stub: str, before: str, after: str) -> None: + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, + ) + + @data_provider( + { + "mismatched_posarg_names": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(x, y): + return 'hello' + """, + """ + def f(x: bool, y: bool) -> str: + return 'hello' + """, + ), + "mismatched_kwarg_names": ( + """ + def f(p: int, q: str, *, a: bool, b: bool) -> str: ... + """, + """ + def f(p, q, *, x, y): + return 'hello' + """, + """ + def f(p, q, *, x, y): + return 'hello' + """, + ), + } + ) + def test_signature_matching_with_nonstrict_posargs( + self, stub: str, before: str, after: str + ) -> None: + self.run_test_case_with_flags( + stub=stub, before=before, after=after, strict_posargs_matching=False + ) + + @data_provider( + { + "mismatched_signature_posargs": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(a): + return 'hello' + """, + """ + def f(a): + return 'hello' + """, + ), + "mismatched_signature_annotation": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(a, b: int): + return 'hello' + """, + """ + def f(a, b: int): + return 'hello' + """, + ), + "mismatched_posarg_names": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(x, y): + return 'hello' + """, + """ + def f(x, y): + return 'hello' + """, + ), + "mismatched_return_type": ( + """ + def f(a: bool, b: bool) -> int: ... + """, + """ + def f(a, b) -> str: + return 'hello' + """, + """ + def f(a, b) -> str: + return 'hello' + """, + ), + "matched_signature": ( + """ + def f(a: bool, b: bool) -> str: ... + """, + """ + def f(a: bool, b = False): + return 'hello' + """, + """ + def f(a: bool, b: bool = False) -> str: + return 'hello' + """, + ), + "matched_signature_with_permuted_kwargs": ( + """ + def f(*, a: bool, b: bool) -> str: ... 
+ """, + """ + def f(*, b: bool, a = False): + return 'hello' + """, + """ + def f(*, b: bool, a: bool = False) -> str: + return 'hello' + """, + ), + } + ) + def test_signature_matching_with_strict_annotation_matching( + self, stub: str, before: str, after: str + ) -> None: + self.run_test_case_with_flags( + stub=stub, before=before, after=after, strict_annotation_matching=True + ) + @data_provider( { "test_counting_parameters_and_returns": ( From 31ba5bf58390a09e6048c1f2b8229ff1c9971ee9 Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Tue, 11 Jan 2022 16:59:46 -0800 Subject: [PATCH 186/632] Remove unused argument to handle_Index (#595) --- libcst/codemod/visitors/_apply_type_annotations.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index cb272bb4..43f5ed03 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -232,7 +232,7 @@ class TypeCollector(cst.CSTVisitor): else: return dequalified_node - def _handle_Index(self, slice: cst.Index, node: cst.Subscript) -> cst.Index: + def _handle_Index(self, slice: cst.Index) -> cst.Index: value = slice.value if isinstance(value, cst.Subscript): return slice.with_changes(value=self._handle_Subscript(value)) @@ -265,7 +265,7 @@ class TypeCollector(cst.CSTVisitor): else: if isinstance(item.slice, cst.Index): new_index = item.slice.with_changes( - value=self._handle_Index(item.slice, item) + value=self._handle_Index(item.slice) ) item = item.with_changes(slice=new_index) new_slice.append(item) From 122627cabc8615a8ba33f503165c25dc8cb6b688 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 12 Jan 2022 09:34:01 -0800 Subject: [PATCH 187/632] Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign (#594) * Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign Summary: This codemod is intended to eventually handle all type comments from PEP 484. This is a partial implementation specifically handling assignment type comments, which as of PEP 526 are better dealt with using AnnAssign nodes. There is more work to do because there are two other kinds of comments to support: function heading comments and function parameter inline comments. But the PEP 526 functionality is complete so I feel like it's worth havign a PR / CI signals / code review at this stage. Test Plan: ``` python -m unittest libcst.codemod.commands.tests.test_convert_type_comments ``` * Disable on python 3.6, 3.7 The ast module didn't get the `type_comment` information we need until python 3.8. It is possible but not a priority right now to enable 3.6 and 3.7 via the typed_ast library, for now I just throw a NotImplementedError with a nice description. There's a note in the code about where to look for a typed_ast example in case anyone wants to add support in the future. * Fix type errors on the 3.8+ testing fix * Do a better job of complaining on Python < 3.8 * Updates based on code review Summary: Do not strip type comments in the visitor pattern; instead, reach down from the parent to do it because this makes it much more reliable that we won't accidentally remove other comments in a codemod (using visitor state to do this isn't really feasible once we handle complex statements like FunctionDef, With, For). Handle multi-statement statement lines; this works since the trailing whitespace can only apply to the final statement on the line. 
It's not really a critical edge case to handle, but the code is no more complicated so we might as well. * Prevent comment stripping for multi-assign * Note in the docstring that this is a limited WIP * Reorder checks so the next step will be cleaner --- .../codemod/commands/convert_type_comments.py | 146 ++++++++++++++++++ .../tests/test_convert_type_comments.py | 88 +++++++++++ 2 files changed, 234 insertions(+) create mode 100644 libcst/codemod/commands/convert_type_comments.py create mode 100644 libcst/codemod/commands/tests/test_convert_type_comments.py diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py new file mode 100644 index 00000000..337904d2 --- /dev/null +++ b/libcst/codemod/commands/convert_type_comments.py @@ -0,0 +1,146 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import ast +import builtins +import functools +import sys +from typing import Optional, Set, Union + +import libcst as cst +from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand + + +@functools.lru_cache() +def _empty_module() -> cst.Module: + return cst.parse_module("") + + +def _code_for_node(node: cst.CSTNode) -> str: + return _empty_module().code_for_node(node) + + +def _ast_for_node(node: cst.CSTNode) -> ast.Module: + code = _code_for_node(node) + return ast.parse(code, type_comments=True) + + +def _simple_statement_type_comment( + node: cst.SimpleStatementLine, +) -> Optional[str]: + return _ast_for_node(node).body[-1].type_comment + + +@functools.lru_cache() +def _builtins() -> Set[str]: + return set(dir(builtins)) + + +def _is_builtin(annotation: str) -> bool: + return annotation in _builtins() + + +def _convert_annotation(raw: str) -> cst.Annotation: + # Convert annotation comments to string annotations to be safe, + # otherwise runtime errors would be common. + # + # Special-case builtins to reduce the amount of quoting noise. + # + # NOTE: we could potentially detect more cases for skipping quotes + # using ScopeProvider, which would make the output prettier. + if _is_builtin(raw): + return cst.Annotation(annotation=cst.Name(value=raw)) + else: + return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) + + +class ConvertTypeComments(VisitorBasedCodemodCommand): + """ + Codemod that converts type comments, as described in + https://www.python.org/dev/peps/pep-0484/#type-comments, + into PEP 526 annotated assignments. + + This is a work in progress: the codemod only currently handles + single-annotation assigns, but it will preserve any type comments + that it does not consume. + """ + + def __init__(self, context: CodemodContext) -> None: + if (sys.version_info.major, sys.version_info.minor) < (3, 8): + # The ast module did not get `type_comments` until Python 3.7. + # In 3.6, we should error than silently running a nonsense codemod. + # + # NOTE: it is possible to use the typed_ast library for 3.6, but + # this is not a high priority right now. See, e.g., the + # mypy.fastparse module. + raise NotImplementedError( + "You are trying to run ConvertTypeComments on a " + + "python version without type comment support. Please " + + "try using python 3.8+ to run your codemod." 
+ ) + super().__init__(context) + + def _strip_TrailingWhitespace( + self, + node: cst.TrailingWhitespace, + ) -> cst.TrailingWhitespace: + return node.with_changes( + whitespace=cst.SimpleWhitespace( + "" + ), # any whitespace came before the comment, so strip it. + comment=None, + ) + + def _convert_Assign( + self, + assign: cst.Assign, + type_comment: str, + ) -> Union[cst.AnnAssign, cst.Assign]: + if len(assign.targets) != 1: + # this case is not yet implemented, and we short-circuit + # it when handling SimpleStatementLine. + raise RuntimeError("Should not convert multi-target assign") + return cst.AnnAssign( + target=assign.targets[0].target, + annotation=_convert_annotation(raw=type_comment), + value=assign.value, + semicolon=assign.semicolon, + ) + + def leave_SimpleStatementLine( + self, + original_node: cst.SimpleStatementLine, + updated_node: cst.SimpleStatementLine, + ) -> cst.SimpleStatementLine: + """ + Convert any SimpleStatementLine containing an Assign with a + type comment into one that uses a PEP 526 AnnAssign. + """ + # determine whether to apply an annotation + assign = updated_node.body[-1] + if not isinstance(assign, cst.Assign): # only Assign matters + return updated_node + type_comment = _simple_statement_type_comment(original_node) + if type_comment is None: + return updated_node + if len(assign.targets) != 1: # multi-target Assign isn't used + return updated_node + target = assign.targets[0].target + if isinstance(target, cst.Tuple): # multi-element Assign isn't handled + return updated_node + # At this point we have a single-line Assign with a type comment. + # Convert it to an AnnAssign and strip the comment. + return updated_node.with_changes( + body=[ + *updated_node.body[:-1], + self._convert_Assign( + assign=assign, + type_comment=type_comment, + ), + ], + trailing_whitespace=self._strip_TrailingWhitespace( + updated_node.trailing_whitespace + ), + ) diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py new file mode 100644 index 00000000..5e7f96ea --- /dev/null +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -0,0 +1,88 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import sys + +from libcst.codemod import CodemodTest +from libcst.codemod.commands.convert_type_comments import ConvertTypeComments + + +class TestConvertTypeComments(CodemodTest): + + maxDiff = 1000 + TRANSFORM = ConvertTypeComments + + def assertCodemod38Plus(self, before: str, after: str) -> None: + """ + Assert that the codemod works on Python 3.8+, and that we raise + a NotImplementedError on other python versions.
+ """ + if (sys.version_info.major, sys.version_info.minor) < (3, 8): + with self.assertRaises(NotImplementedError): + super().assertCodemod(before, after) + else: + super().assertCodemod(before, after) + + # Tests converting assignment type comments ----------------- + + def test_convert_assignments(self) -> None: + before = """ + y = 5 # type: int + z = ('this', 7) # type: typing.Tuple[str, int] + """ + after = """ + y: int = 5 + z: "typing.Tuple[str, int]" = ('this', 7) + """ + self.assertCodemod38Plus(before, after) + + def test_convert_assignments_in_context(self) -> None: + """ + Also verify that our matching works regardless of spacing + """ + before = """ + bar(); baz = 12 # type: int + + def foo(): + z = ('this', 7) # type: typing.Tuple[str, int] + + class C: + attr0 = 10# type: int + def __init__(self): + self.attr1 = True # type: bool + """ + after = """ + bar(); baz: int = 12 + + def foo(): + z: "typing.Tuple[str, int]" = ('this', 7) + + class C: + attr0: int = 10 + def __init__(self): + self.attr1: bool = True + """ + self.assertCodemod38Plus(before, after) + + def test_no_change_when_type_comment_unused(self) -> None: + before = """ + # type-ignores are not type comments + x = 10 # type: ignore + + # a commented type comment (per PEP 484) is not a type comment + z = 15 # # type: int + + # a type comment in an illegal location won't be used + print("hello") # type: None + + # We currently cannot handle multiple-target assigns. + # Make sure we won't strip those type comments. + x, y, z = [], [], [] # type: List[int], List[int], List[str] + x, y, z = [], [], [] # type: (List[int], List[int], List[str]) + a, b, *c = range(5) # type: float, float, List[float] + a, b = 1, 2 # type: Tuple[int, int] + """ + after = before + self.assertCodemod38Plus(before, after) From 5f22b6c4383ebbbcee6af4bfc4d6c3aab2c38e6b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 12 Jan 2022 16:10:04 +0000 Subject: [PATCH 188/632] bump version to 0.4.0 --- CHANGELOG.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c68fc7d6..a54d67cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,26 @@ +# 0.4.0 - 2022-01-12 + +This release contains a new parsing infrastructure that is turned off by default. You +can enable it by setting the `LIBCST_PARSER_TYPE` environment variable to `native` +before parsing an input with the usual LibCST APIs. Parsing Python 3.10 documents is +only supported in this new mode. + +Note: the new parser is built as a native extension, so LibCST will ship with binary +wheels from now on. 
+ +## Added +* Implement a Python PEG parser in Rust by @zsol in [#566](https://github.com/Instagram/LibCST/pull/566) +* implement PEP-654: except* by @zsol in [#571](https://github.com/Instagram/LibCST/pull/571) +* Implement PEP-634 - Match statement by @zsol in [#568](https://github.com/Instagram/LibCST/pull/568) +* Add instructions to codegen test failures by @stroxler in [#582](https://github.com/Instagram/LibCST/pull/582) +* Support Parenthesized With Statements by @stroxler in [#584](https://github.com/Instagram/LibCST/pull/584) +* Support relative imports in AddImportsVisitor by @martindemello in [#585](https://github.com/Instagram/LibCST/pull/585) +* Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign by @stroxler in [#594](https://github.com/Instagram/LibCST/pull/594) + +## Updated +* Update license headers by @zsol in [#560](https://github.com/Instagram/LibCST/pull/560) +* Use precise signature matching when inserting function type annotations by @martindemello in [#591](https://github.com/Instagram/LibCST/pull/591) + # 0.3.23 - 2021-11-23 ## Fixed From e03ed43be88c8833ed013ec56f4d6200ba68eb92 Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Fri, 14 Jan 2022 11:39:18 -0800 Subject: [PATCH 189/632] Merge in TypeVars and Generic base classes in ApplyTypeAnnotationVisitor (#596) * Tracks TypeVars that are used in type annotations in the pyi file, and adds their Assign statements to the merged file. * Adds Generic[T] as a base class if needed. --- .../visitors/_apply_type_annotations.py | 108 ++++++++++++++++- .../tests/test_apply_type_annotations.py | 114 ++++++++++++++++++ 2 files changed, 219 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 43f5ed03..24cd50a7 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -8,6 +8,8 @@ from dataclasses import dataclass, field from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst +import libcst.matchers as m + from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor @@ -55,6 +57,18 @@ def _is_set(x: Union[None, cst.CSTNode, cst.MaybeSentinel]) -> bool: return x is not None and x != cst.MaybeSentinel.DEFAULT +def _get_string_value(node: cst.SimpleString) -> str: + s = node.value + c = s[-1] + return s[s.index(c) : -1] + + +def _find_generic_base(node: cst.ClassDef) -> Optional[cst.Arg]: + for b in node.bases: + if m.matches(b.value, m.Subscript(value=m.Name("Generic"))): + return b + + @dataclass(frozen=True) class FunctionKey: name: str @@ -80,7 +94,7 @@ class FunctionAnnotation: returns: Optional[cst.Annotation] -class TypeCollector(cst.CSTVisitor): +class TypeCollector(m.MatcherDecoratableVisitor): """ Collect type annotations from a stub module. """ @@ -91,6 +105,7 @@ class TypeCollector(cst.CSTVisitor): ) def __init__(self, existing_imports: Set[str], context: CodemodContext) -> None: + super().__init__() # Qualifier for storing the canonical name of the current function. self.qualifier: List[str] = [] # Store the annotations. 
@@ -99,6 +114,9 @@ class TypeCollector(cst.CSTVisitor): self.existing_imports: Set[str] = existing_imports self.class_definitions: Dict[str, cst.ClassDef] = {} self.context = context + self.current_assign: Optional[cst.Assign] = None # used to collect typevars + self.typevars: Dict[str, cst.Assign] = {} + self.annotation_names: Set[str] = set() def visit_ClassDef(self, node: cst.ClassDef) -> None: self.qualifier.append(node.name.value) @@ -153,6 +171,29 @@ class TypeCollector(cst.CSTVisitor): def leave_AnnAssign(self, original_node: cst.AnnAssign) -> None: self.qualifier.pop() + def visit_Assign(self, node: cst.Assign) -> None: + self.current_assign = node + + def leave_Assign(self, original_node: cst.Assign) -> None: + self.current_assign = None + + @m.call_if_inside(m.Assign()) + @m.visit(m.Call(func=m.Name("TypeVar"))) + def record_typevar(self, node: cst.Call) -> None: + # pyre-ignore current_assign is never None here + name = get_full_name_for_node(self.current_assign.targets[0].target) + if name: + # pyre-ignore current_assign is never None here + self.typevars[name] = self.current_assign + self._handle_qualification_and_should_qualify("typing.TypeVar") + self.current_assign = None + + def leave_Module(self, original_node: cst.Module) -> None: + # Filter out unused typevars + self.typevars = { + k: v for k, v in self.typevars.items() if k in self.annotation_names + } + def _get_unique_qualified_name(self, node: cst.CSTNode) -> str: name = None names = [q.name for q in self.get_metadata(QualifiedNameProvider, node)] @@ -194,7 +235,7 @@ class TypeCollector(cst.CSTVisitor): def _handle_qualification_and_should_qualify(self, qualified_name: str) -> bool: """ - Basd on a qualified name and the existing module imports, record that + Based on a qualified name and the existing module imports, record that we need to add an import if necessary and return whether or not we should use the qualified name due to a preexisting import. 
""" @@ -227,6 +268,7 @@ class TypeCollector(cst.CSTVisitor): dequalified_node, ) = self._get_qualified_name_and_dequalified_node(node) should_qualify = self._handle_qualification_and_should_qualify(qualified_name) + self.annotation_names.add(qualified_name) if should_qualify: return node else: @@ -239,6 +281,8 @@ class TypeCollector(cst.CSTVisitor): elif isinstance(value, cst.Attribute): return slice.with_changes(value=self._handle_NameOrAttribute(value)) else: + if isinstance(value, cst.SimpleString): + self.annotation_names.add(_get_string_value(value)) return slice def _handle_Subscript(self, node: cst.Subscript) -> cst.Subscript: @@ -279,6 +323,7 @@ class TypeCollector(cst.CSTVisitor): def _handle_Annotation(self, annotation: cst.Annotation) -> cst.Annotation: node = annotation.annotation if isinstance(node, cst.SimpleString): + self.annotation_names.add(_get_string_value(node)) return annotation elif isinstance(node, cst.Subscript): return cst.Annotation(annotation=self._handle_Subscript(node)) @@ -309,6 +354,7 @@ class Annotations: ) attribute_annotations: Dict[str, cst.Annotation] = field(default_factory=dict) class_definitions: Dict[str, cst.ClassDef] = field(default_factory=dict) + typevars: Dict[str, cst.Assign] = field(default_factory=dict) @dataclass @@ -318,6 +364,7 @@ class AnnotationCounts: parameter_annotations: int = 0 return_annotations: int = 0 classes_added: int = 0 + typevars_and_generics_added: int = 0 def any_changes_applied(self) -> bool: return ( @@ -326,6 +373,7 @@ class AnnotationCounts: + self.parameter_annotations + self.return_annotations + self.classes_added + + self.typevars_and_generics_added ) > 0 @@ -397,6 +445,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # only made changes to the imports. self.annotation_counts: AnnotationCounts = AnnotationCounts() + # We use this to collect typevars, to avoid importing existing ones from the pyi file + self.current_assign: Optional[cst.Assign] = None + self.typevars: Dict[str, cst.Assign] = {} + @staticmethod def store_stub_in_context( context: CodemodContext, @@ -463,6 +515,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.annotations.function_annotations.update(visitor.function_annotations) self.annotations.attribute_annotations.update(visitor.attribute_annotations) self.annotations.class_definitions.update(visitor.class_definitions) + self.annotations.typevars.update(visitor.typevars) tree_with_imports = AddImportsVisitor( context=self.context, @@ -722,7 +775,16 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): def leave_ClassDef( self, original_node: cst.ClassDef, updated_node: cst.ClassDef ) -> cst.ClassDef: + cls_name = ".".join(self.qualifier) self.qualifier.pop() + definition = self.annotations.class_definitions.get(cls_name) + if definition: + b1 = _find_generic_base(definition) + b2 = _find_generic_base(updated_node) + if b1 and not b2: + new_bases = list(updated_node.bases) + [b1] + self.annotation_counts.typevars_and_generics_added += 1 + return updated_node.with_changes(bases=new_bases) return updated_node def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: @@ -756,10 +818,29 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return updated_node.with_changes(params=new_parameters) return updated_node + def visit_Assign(self, node: cst.Assign) -> None: + self.current_assign = node + + @m.call_if_inside(m.Assign()) + @m.visit(m.Call(func=m.Name("TypeVar"))) + def record_typevar(self, node: cst.Call) -> None: + # pyre-ignore 
current_assign is never None here + name = get_full_name_for_node(self.current_assign.targets[0].target) + if name: + # Preserve the whole node, even though we currently just use the + # name, so that we can match bounds and variance at some point and + # determine if two typevars with the same name are indeed the same. + + # pyre-ignore current_assign is never None here + self.typevars[name] = self.current_assign + self.current_assign = None + def leave_Assign( self, original_node: cst.Assign, updated_node: cst.Assign ) -> Union[cst.Assign, cst.AnnAssign]: + self.current_assign = None + if len(original_node.targets) > 1: for assign in original_node.targets: target = assign.target @@ -787,8 +868,17 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): for name, definition in self.annotations.class_definitions.items() if name not in self.visited_classes ] - if not self.toplevel_annotations and not fresh_class_definitions: + + # NOTE: The entire change will also be abandoned if + # self.annotation_counts is all 0s, so if adding any new category make + # sure to record it there. + if not ( + self.toplevel_annotations + or fresh_class_definitions + or self.annotations.typevars + ): return updated_node + toplevel_statements = [] # First, find the insertion point for imports statements_before_imports, statements_after_imports = self._split_module( @@ -806,6 +896,18 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) toplevel_statements.append(cst.SimpleStatementLine([annotated_assign])) + # TypeVar definitions could be scattered through the file, so do not + # attempt to put new ones with existing ones, just add them at the top. + typevars = { + k: v for k, v in self.annotations.typevars.items() if k not in self.typevars + } + if typevars: + for var, stmt in typevars.items(): + toplevel_statements.append(cst.Newline()) + toplevel_statements.append(stmt) + self.annotation_counts.typevars_and_generics_added += 1 + toplevel_statements.append(cst.Newline()) + self.annotation_counts.classes_added = len(fresh_class_definitions) toplevel_statements.extend(fresh_class_definitions) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 150e996a..8689afbd 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -821,6 +821,120 @@ class TestApplyAnnotationsVisitor(CodemodTest): def test_adding_typed_dicts(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) + @data_provider( + { + "insert_new_TypeVar_not_in_source_file": ( + """ + from typing import Dict, TypeVar + + _KT = TypeVar('_KT') + _VT = TypeVar('_VT') + + class UserDict(Dict[_KT, _VT]): + def __init__(self, initialdata: Dict[_KT, _VT] = ...): ... + """, + """ + class UserDict: + def __init__(self, initialdata = None): + pass + """, + """ + from typing import Dict, TypeVar + + _KT = TypeVar('_KT') + _VT = TypeVar('_VT') + + class UserDict: + def __init__(self, initialdata: Dict[_KT, _VT] = None): + pass + """, + ), + "insert_only_used_TypeVar_not_already_in_source": ( + """ + from typing import Dict, TypeVar + + K = TypeVar('K') + V = TypeVar('V') + X = TypeVar('X') + + class UserDict(Dict[K, V]): + def __init__(self, initialdata: Dict[K, V] = ...): ... 
+ """, + """ + from typing import TypeVar + + V = TypeVar('V') + + class UserDict: + def __init__(self, initialdata = None): + pass + + def f(x: V) -> V: + pass + """, + """ + from typing import Dict, TypeVar + + K = TypeVar('K') + + V = TypeVar('V') + + class UserDict: + def __init__(self, initialdata: Dict[K, V] = None): + pass + + def f(x: V) -> V: + pass + """, + ), + "insert_Generic_base_class": ( + """ + from typing import TypeVar + + T = TypeVar('T') + X = TypeVar('X') + + class B(A, Generic[T]): + def f(self, x: T) -> T: ... + """, + """ + from typing import TypeVar + + V = TypeVar('V') + + def f(x: V) -> V: + pass + + class A: + pass + + class B(A): + def f(self, x): + pass + """, + """ + from typing import TypeVar + + T = TypeVar('T') + + V = TypeVar('V') + + def f(x: V) -> V: + pass + + class A: + pass + + class B(A, Generic[T]): + def f(self, x: T) -> T: + pass + """, + ), + } + ) + def test_adding_typevars(self, stub: str, before: str, after: str) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) + @data_provider( { "required_positional_only_args": ( From c7d76c65f3e0e7214f2c88d8de909a6e1f248b28 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 15 Jan 2022 12:42:42 +0000 Subject: [PATCH 190/632] Add docs about the native parts (#601) Co-authored-by: Steven Troxler --- README.rst | 11 +++- native/libcst/README.md | 122 ++++++++++++++++++++++++++-------------- 2 files changed, 90 insertions(+), 43 deletions(-) diff --git a/README.rst b/README.rst index 9f374c4d..be1d5d94 100644 --- a/README.rst +++ b/README.rst @@ -33,7 +33,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python .. intro-start -LibCST parses Python 3.0, 3.1, 3.3, 3.5, 3.6, 3.7 or 3.8 source code as a CST tree that keeps +LibCST parses Python 3.0 -> 3.11 source code as a CST tree that keeps all formatting details (comments, whitespaces, parentheses, etc). It's useful for building automated refactoring (codemod) applications and linters. @@ -129,6 +129,11 @@ packaging tools. We recommend installing the latest stable release from pip install libcst +For parsing, LibCST ships with a native extension, so releases are distributed as binary +wheels as well as the source code. If a binary wheel is not available for your system +(Linux/Windows x86/x64 and Mac x64/arm are covered), you'll need a recent +`Rust toolchain `_ for installing. + Further Reading --------------- - `Static Analysis at Scale: An Instagram Story. `_ @@ -137,7 +142,9 @@ Further Reading Development ----------- -Start by setting up and activating a virtualenv: +You'll need a recent `Rust toolchain `_ for developing. + +Then, start by setting up and activating a virtualenv: .. code-block:: shell diff --git a/native/libcst/README.md b/native/libcst/README.md index f33563b2..42eb2f6c 100644 --- a/native/libcst/README.md +++ b/native/libcst/README.md @@ -1,66 +1,106 @@ -# libcst_native +# libcst/native -A very experimental native extension to speed up LibCST. This does not currently provide -much performance benefit and is therefore not recommended for general use. +A native extension to enable parsing of new Python grammar in LibCST. -The extension is written in Rust using [PyO3](https://pyo3.rs/). +The extension is written in Rust, and exposed to Python using [PyO3](https://pyo3.rs/). +This is packaged together with libcst, and can be imported from `libcst.native`. When +the `LIBCST_PARSER_TYPE` environment variable is set to `native`, the LibCST APIs use +this module for all parsing. 
-This installs as a separate python package that LibCST looks for and will import if it's -available. +Later on, the parser library might be packaged separately as +[a Rust crate](https://crates.io). Pull requests towards this are much appreciated. + +## Goals + +1. Adopt the CPython grammar definition as closely as possible to reduce maintenance + burden. This means using a PEG parser. +2. Feature-parity with the pure-python LibCST parser: the API should be easy to use from + Python, support parsing with a target version, bytes and strings as inputs, etc. +3. [future] Performance. The aspirational goal is to be within 2x CPython performance, + which would enable LibCST to be used in interactive use cases (think IDEs). +4. [future] Error recovery. The parser should be able to handle partially complete + documents, returning a CST for the syntactically correct parts, and a list of errors + found. + +## Structure + +The extension is organized into two rust crates: `libcst_derive` contains some macros to +facilitate various features of CST nodes, and `libcst` contains the `parser` itself +(including the Python grammar), a `tokenizer` implementation by @bgw, and a very basic +representation of CST `nodes`. Parsing is done by +1. **tokenizing** the input utf-8 string (bytes are not supported at the Rust layer, + they are converted to utf-8 strings by the python wrapper) +2. running the **PEG parser** on the tokenized input, which also captures certain anchor + tokens in the resulting syntax tree +3. using the anchor tokens to **inflate** the syntax tree into a proper CST + +These steps are wrapped into a high-level `parse_module` API +[here](https://github.com/Instagram/LibCST/blob/main/native/libcst/src/lib.rs#L43), +along with `parse_statement` and `parse_expression` functions which all just accept the +input string and an optional encoding. + +These Rust functions are exposed to Python +[here](https://github.com/Instagram/LibCST/blob/main/native/libcst/src/py.rs) using the +excellent [PyO3](https://pyo3.rs/) library, plus an `IntoPy` trait which is mostly +implemented via a macro in `libcst_derive`. -## Using with LibCST +## Hacking -[Set up a rust development environment](https://www.rust-lang.org/tools/install). Using -`rustup` is recommended, but not necessary. Rust 1.45.0+ should work. +### Grammar -Follow the instructions for setting up a virtualenv in the top-level README, then: +The grammar is mostly a straightforward translation from the [CPython +grammar](https://github.com/python/cpython/blob/main/Grammar/python.gram), with some +exceptions: -``` -cd libcst_native -maturin develop # install libcst_native to the virtualenv -cd .. # cd back into the main project -python -m unittest -``` +* The output of grammar rules are deflated CST nodes that capture the AST plus + additional anchor token references used for whitespace parsing later on. +* Rules in the grammar must be strongly typed, as enforced by the Rust compiler. The + CPython grammar rules are a bit more loosely-typed in comparison. +* Some features in the CPython peg parser are not supported by rust-peg: keywords, + mutually recursive rules, special `invalid_` rules, the `~` operator, terminating the + parser early. -This will run the python test suite. Nothing special is required to use `libcst_native`, -since `libcst` will automatically use the native extension when it's installed. +The PEG parser is run on a `Vec` of `Token`s, and tries its best to avoid allocating any +strings, working only with references. 
As such, the output nodes don't own any strings, +but refer to slices of the original input (hence the `'a` lifetime parameter on almost +all nodes). -When benchmarking this code, make sure to run `maturin develop` with the `--release` -flag to enable compiler optimizations. +### Whitespace parsing -You can disable the native extension by uninstalling the package from your virtualenv: +The `Inflate` trait is responsible for taking a "deflated", skeleton CST node, and +parsing out the relevant whitespace from the anchor tokens to produce an "inflated" +(normal) CST node. In addition to the deflated node, inflation requires a whitespace +config object which contains global information required for certain aspects of +whitespace parsing, like the default indentation. -``` -pip uninstall libcst_native -``` +Inflation consumes the deflated node, while mutating the tokens referenced by it. This +is important to make sure whitespace is only ever assigned to at most one CST node. The +`Inflate` trait implementation needs to ensure that all whitespace is assigned to a CST +node; this is generally verified using roundtrip tests (i.e. parsing code and then +generating it back to then assert the original and generated are byte-by-byte equal). +The general convention is that the top-most possible node owns a certain piece of +whitespace, which should be straightforward to achieve in a top-down parser like +`Inflate`. In cases where whitespace is shared between sibling nodes, usually the +leftmost node owns the whitespace except in the case of trailing commas and closing +parentheses, where the latter owns the whitespace (for backwards compatibility with the +pure python parser). See the implementation of `inflate_element` for how this is done. -## Rust Tests +### Tests In addition to running the python test suite, you can run some tests written in rust with ``` -cargo test --no-default-features +cd native +cargo test ``` -The `--no-default-features` flag needed to work around an incompatibility between tests -and pyo3's `extension-module` feature. +These include unit and roundtrip tests. +Additionally, some benchmarks can be run on x86-based architectures using `cargo bench`. -## Code Formatting +### Code Formatting Use `cargo fmt` to format your code. - - -## Release - -This isn't currently supported, so there's no releases available, but the end-goal would -be to publish this on PyPI. - -Because this is a native extension, it must be re-built for each platform/architecture. -The per-platform build could be automated using a CI system, [like github -actions][gh-actions]. 
- -[gh-actions]: https://github.com/PyO3/maturin/blob/master/.github/workflows/release.yml From bd5ede79533f5279b149d66a8e683161cdf5404c Mon Sep 17 00:00:00 2001 From: Arie Bovenberg Date: Sun, 16 Jan 2022 15:14:32 +0100 Subject: [PATCH 191/632] add slots to base classes, @add_slots takes bases into account (#605) * add slots to base classes, @add_slots takes bases into account * state changes in apache 2.0 licensed add_slots --- libcst/_add_slots.py | 19 ++++++-- libcst/_nodes/base.py | 10 +++- libcst/_nodes/expression.py | 39 +++++++++++++-- libcst/_nodes/op.py | 14 ++++++ libcst/_nodes/statement.py | 14 +++++- libcst/_nodes/whitespace.py | 2 + libcst/tests/test_add_slots.py | 46 ++++++++++++++++++ libcst/tests/test_batched_visitor.py | 72 ++++++++++++++-------------- 8 files changed, 170 insertions(+), 46 deletions(-) create mode 100644 libcst/tests/test_add_slots.py diff --git a/libcst/_add_slots.py b/libcst/_add_slots.py index 6e9c0041..bbe2c634 100644 --- a/libcst/_add_slots.py +++ b/libcst/_add_slots.py @@ -1,8 +1,10 @@ # This file is derived from github.com/ericvsmith/dataclasses, and is Apache 2 licensed. # https://github.com/ericvsmith/dataclasses/blob/ae712dd993420d43444f188f452/LICENSE.txt # https://github.com/ericvsmith/dataclasses/blob/ae712dd993420d43444f/dataclass_tools.py +# Changed: takes slots in base classes into account when creating slots import dataclasses +from itertools import chain, filterfalse from typing import Any, Mapping, Type, TypeVar _T = TypeVar("_T") @@ -19,7 +21,14 @@ def add_slots(cls: Type[_T]) -> Type[_T]: # Create a new dict for our new class. cls_dict = dict(cls.__dict__) field_names = tuple(f.name for f in dataclasses.fields(cls)) - cls_dict["__slots__"] = field_names + inherited_slots = set( + chain.from_iterable( + superclass.__dict__.get("__slots__", ()) for superclass in cls.mro() + ) + ) + cls_dict["__slots__"] = tuple( + filterfalse(inherited_slots.__contains__, field_names) + ) for field_name in field_names: # Remove our attributes, if present. They'll still be # available in _MARKER. @@ -50,12 +59,14 @@ def add_slots(cls: Type[_T]) -> Type[_T]: def __getstate__(self: object) -> Mapping[str, Any]: return { - slot: getattr(self, slot) for slot in self.__slots__ if hasattr(self, slot) + field.name: getattr(self, field.name) + for field in dataclasses.fields(self) + if hasattr(self, field.name) } def __setstate__(self: object, state: Mapping[str, Any]) -> None: - for slot, value in state.items(): - object.__setattr__(self, slot, value) + for fieldname, value in state.items(): + object.__setattr__(self, fieldname, value) cls.__getstate__ = __getstate__ cls.__setstate__ = __setstate__ diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 9173414b..03597641 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -6,7 +6,7 @@ from abc import ABC, abstractmethod from copy import deepcopy from dataclasses import dataclass, field, fields, replace -from typing import Any, cast, Dict, List, Mapping, Sequence, TypeVar, Union +from typing import Any, cast, ClassVar, Dict, List, Mapping, Sequence, TypeVar, Union from libcst._flatten_sentinel import FlattenSentinel from libcst._nodes.internal import CodegenState @@ -109,6 +109,9 @@ def _clone(val: object) -> object: @dataclass(frozen=True) class CSTNode(ABC): + + __slots__: ClassVar[Sequence[str]] = () + def __post_init__(self) -> None: # PERF: It might make more sense to move validation work into the visitor, which # would allow us to avoid validating the tree when parsing a file. 
@@ -468,6 +471,9 @@ class CSTNode(ABC): class BaseLeaf(CSTNode, ABC): + + __slots__ = () + @property def children(self) -> Sequence[CSTNode]: # override this with an optimized implementation @@ -487,6 +493,8 @@ class BaseValueToken(BaseLeaf, ABC): into the parent CSTNode, and hard-coded into the implementation of _codegen. """ + __slots__ = () + value: str def _codegen_impl(self, state: CodegenState) -> None: diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index dba5faf3..6b86a8b2 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -222,6 +222,8 @@ class _BaseParenthesizedNode(CSTNode, ABC): this to get that functionality. """ + __slots__ = () + lpar: Sequence[LeftParen] = () # Sequence of parenthesis for precedence dictation. rpar: Sequence[RightParen] = () @@ -254,6 +256,8 @@ class BaseExpression(_BaseParenthesizedNode, ABC): An base class for all expressions. :class:`BaseExpression` contains no fields. """ + __slots__ = () + def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: """ Returns true if this expression is safe to be use with a word operator @@ -296,7 +300,7 @@ class BaseAssignTargetExpression(BaseExpression, ABC): `_. """ - pass + __slots__ = () class BaseDelTargetExpression(BaseExpression, ABC): @@ -316,7 +320,7 @@ class BaseDelTargetExpression(BaseExpression, ABC): `_. """ - pass + __slots__ = () @add_slots @@ -393,6 +397,8 @@ class BaseNumber(BaseExpression, ABC): used anywhere that you need to explicitly take any number type. """ + __slots__ = () + def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: """ Numbers are funny. The expression "5in [1,2,3,4,5]" is a valid expression @@ -522,13 +528,16 @@ class BaseString(BaseExpression, ABC): :class:`SimpleString`, :class:`ConcatenatedString`, and :class:`FormattedString`. """ - pass + __slots__ = () StringQuoteLiteral = Literal['"', "'", '"""', "'''"] class _BasePrefixedString(BaseString, ABC): + + __slots__ = () + @property def prefix(self) -> str: """ @@ -699,7 +708,7 @@ class BaseFormattedStringContent(CSTNode, ABC): sequence of :class:`BaseFormattedStringContent` parts. """ - pass + __slots__ = () @add_slots @@ -1415,6 +1424,8 @@ class BaseSlice(CSTNode, ABC): This node is purely for typing. """ + __slots__ = () + @add_slots @dataclass(frozen=True) @@ -2190,6 +2201,8 @@ class _BaseExpressionWithArgs(BaseExpression, ABC): in typing. So, we have common validation functions here. """ + __slots__ = () + #: Sequence of arguments that will be passed to the function call. args: Sequence[Arg] = () @@ -2631,6 +2644,8 @@ class _BaseElementImpl(CSTNode, ABC): An internal base class for :class:`Element` and :class:`DictElement`. """ + __slots__ = () + value: BaseExpression comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT @@ -2668,6 +2683,8 @@ class BaseElement(_BaseElementImpl, ABC): BaseDictElement. """ + __slots__ = () + class BaseDictElement(_BaseElementImpl, ABC): """ @@ -2675,6 +2692,8 @@ class BaseDictElement(_BaseElementImpl, ABC): BaseElement. """ + __slots__ = () + @add_slots @dataclass(frozen=True) @@ -2957,6 +2976,8 @@ class BaseList(BaseExpression, ABC): object when evaluated. """ + __slots__ = () + lbracket: LeftSquareBracket = LeftSquareBracket.field() #: Brackets surrounding the list. rbracket: RightSquareBracket = RightSquareBracket.field() @@ -3037,6 +3058,8 @@ class _BaseSetOrDict(BaseExpression, ABC): shouldn't be exported. 
""" + __slots__ = () + lbrace: LeftCurlyBrace = LeftCurlyBrace.field() #: Braces surrounding the set or dict. rbrace: RightCurlyBrace = RightCurlyBrace.field() @@ -3062,6 +3085,8 @@ class BaseSet(_BaseSetOrDict, ABC): a set object when evaluated. """ + __slots__ = () + @add_slots @dataclass(frozen=True) @@ -3131,6 +3156,8 @@ class BaseDict(_BaseSetOrDict, ABC): a dict object when evaluated. """ + __slots__ = () + @add_slots @dataclass(frozen=True) @@ -3407,6 +3434,8 @@ class BaseComp(BaseExpression, ABC): :class:`GeneratorExp`, :class:`ListComp`, :class:`SetComp`, and :class:`DictComp`. """ + __slots__ = () + for_in: CompFor @@ -3417,6 +3446,8 @@ class BaseSimpleComp(BaseComp, ABC): ``value``. """ + __slots__ = () + #: The expression evaluated during each iteration of the comprehension. This #: lexically comes before the ``for_in`` clause, but it is semantically the #: inner-most element, evaluated inside the ``for_in`` clause. diff --git a/libcst/_nodes/op.py b/libcst/_nodes/op.py index ea02835a..e19d24d3 100644 --- a/libcst/_nodes/op.py +++ b/libcst/_nodes/op.py @@ -19,6 +19,8 @@ class _BaseOneTokenOp(CSTNode, ABC): Any node that has a static value and needs to own whitespace on both sides. """ + __slots__ = () + whitespace_before: BaseParenthesizableWhitespace whitespace_after: BaseParenthesizableWhitespace @@ -51,6 +53,8 @@ class _BaseTwoTokenOp(CSTNode, ABC): in beteween them. """ + __slots__ = () + whitespace_before: BaseParenthesizableWhitespace whitespace_between: BaseParenthesizableWhitespace @@ -93,6 +97,8 @@ class BaseUnaryOp(CSTNode, ABC): Any node that has a static value used in a :class:`UnaryOperation` expression. """ + __slots__ = () + #: Any space that appears directly after this operator. whitespace_after: BaseParenthesizableWhitespace @@ -119,6 +125,8 @@ class BaseBooleanOp(_BaseOneTokenOp, ABC): This node is purely for typing. """ + __slots__ = () + class BaseBinaryOp(CSTNode, ABC): """ @@ -126,6 +134,8 @@ class BaseBinaryOp(CSTNode, ABC): This node is purely for typing. """ + __slots__ = () + class BaseCompOp(CSTNode, ABC): """ @@ -133,6 +143,8 @@ class BaseCompOp(CSTNode, ABC): This node is purely for typing. """ + __slots__ = () + class BaseAugOp(CSTNode, ABC): """ @@ -140,6 +152,8 @@ class BaseAugOp(CSTNode, ABC): This node is purely for typing. """ + __slots__ = () + @add_slots @dataclass(frozen=True) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 9493f57c..ded7c7c6 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -79,6 +79,8 @@ class BaseSuite(CSTNode, ABC): -- https://docs.python.org/3/reference/compound_stmts.html """ + __slots__ = () + body: Union[Sequence["BaseStatement"], Sequence["BaseSmallStatement"]] @@ -88,7 +90,7 @@ class BaseStatement(CSTNode, ABC): in a particular location. """ - pass + __slots__ = () class BaseSmallStatement(CSTNode, ABC): @@ -99,6 +101,8 @@ class BaseSmallStatement(CSTNode, ABC): simplify type definitions and isinstance checks. """ + __slots__ = () + #: An optional semicolon that appears after a small statement. This is optional #: for the last small statement in a :class:`SimpleStatementLine` or #: :class:`SimpleStatementSuite`, but all other small statements inside a simple @@ -370,6 +374,8 @@ class _BaseSimpleStatement(CSTNode, ABC): small statement. """ + __slots__ = () + #: Sequence of small statements. All but the last statement are required to have #: a semicolon. 
body: Sequence[BaseSmallStatement] @@ -554,6 +560,8 @@ class BaseCompoundStatement(BaseStatement, ABC): -- https://docs.python.org/3/reference/compound_stmts.html """ + __slots__ = () + #: The body of this compound statement. body: BaseSuite @@ -2633,6 +2641,8 @@ class MatchPattern(_BaseParenthesizedNode, ABC): statement. """ + __slots__ = () + @add_slots @dataclass(frozen=True) @@ -2960,6 +2970,8 @@ class MatchSequence(MatchPattern, ABC): otherwise matches a fixed length sequence. """ + __slots__ = () + #: Patterns to be matched against the subject elements if it is a sequence. patterns: Sequence[Union[MatchSequenceElement, MatchStar]] diff --git a/libcst/_nodes/whitespace.py b/libcst/_nodes/whitespace.py index 686c14fb..b1332c13 100644 --- a/libcst/_nodes/whitespace.py +++ b/libcst/_nodes/whitespace.py @@ -48,6 +48,8 @@ class BaseParenthesizableWhitespace(CSTNode, ABC): ``iftest``), it has some semantic value. """ + __slots__ = () + # TODO: Should we somehow differentiate places where we require non-zero whitespace # with a separate type? diff --git a/libcst/tests/test_add_slots.py b/libcst/tests/test_add_slots.py new file mode 100644 index 00000000..e354f60b --- /dev/null +++ b/libcst/tests/test_add_slots.py @@ -0,0 +1,46 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pickle +from dataclasses import dataclass +from typing import ClassVar + +from libcst._add_slots import add_slots + +from libcst.testing.utils import UnitTest + + +# this test class needs to be defined at module level to test pickling. +@add_slots +@dataclass(frozen=True) +class A: + x: int + y: str + + Z: ClassVar[int] = 5 + + +class AddSlotsTest(UnitTest): + def test_pickle(self) -> None: + a = A(1, "foo") + self.assertEqual(a, pickle.loads(pickle.dumps(a))) + object.__delattr__(a, "y") + self.assertEqual(a.x, pickle.loads(pickle.dumps(a)).x) + + def test_prevents_slots_overlap(self) -> None: + class A: + __slots__ = ("x",) + + class B(A): + __slots__ = ("z",) + + @add_slots + @dataclass + class C(B): + x: int + y: str + z: bool + + self.assertSequenceEqual(C.__slots__, ("y",)) diff --git a/libcst/tests/test_batched_visitor.py b/libcst/tests/test_batched_visitor.py index 9bcc562f..9009847c 100644 --- a/libcst/tests/test_batched_visitor.py +++ b/libcst/tests/test_batched_visitor.py @@ -16,57 +16,57 @@ class BatchedVisitorTest(UnitTest): mock = Mock() class ABatchable(BatchableCSTVisitor): - def visit_Pass(self, node: cst.Pass) -> None: - mock.visited_a() - object.__setattr__(node, "a_attr", True) + def visit_Del(self, node: cst.Del) -> None: + object.__setattr__(node, "target", mock.visited_a()) class BBatchable(BatchableCSTVisitor): - def visit_Pass(self, node: cst.Pass) -> None: - mock.visited_b() - object.__setattr__(node, "b_attr", 1) + def visit_Del(self, node: cst.Del) -> None: + object.__setattr__(node, "semicolon", mock.visited_b()) - module = visit_batched(parse_module("pass"), [ABatchable(), BBatchable()]) - pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] - - # Check properties were set - self.assertEqual(object.__getattribute__(pass_, "a_attr"), True) - self.assertEqual(object.__getattribute__(pass_, "b_attr"), 1) + module = visit_batched(parse_module("del a"), [ABatchable(), BBatchable()]) + del_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] # Check that each visitor was only called once mock.visited_a.assert_called_once() 
mock.visited_b.assert_called_once() + # Check properties were set + self.assertEqual(object.__getattribute__(del_, "target"), mock.visited_a()) + self.assertEqual(object.__getattribute__(del_, "semicolon"), mock.visited_b()) + def test_all_visits(self) -> None: mock = Mock() class Batchable(BatchableCSTVisitor): - def visit_Pass(self, node: cst.Pass) -> None: - mock.visit_Pass() - object.__setattr__(node, "visit_Pass", True) + def visit_If(self, node: cst.If) -> None: + object.__setattr__(node, "test", mock.visit_If()) - def visit_Pass_semicolon(self, node: cst.Pass) -> None: - mock.visit_Pass_semicolon() - object.__setattr__(node, "visit_Pass_semicolon", True) + def visit_If_body(self, node: cst.If) -> None: + object.__setattr__(node, "leading_lines", mock.visit_If_body()) - def leave_Pass_semicolon(self, node: cst.Pass) -> None: - mock.leave_Pass_semicolon() - object.__setattr__(node, "leave_Pass_semicolon", True) + def leave_If_body(self, node: cst.If) -> None: + object.__setattr__(node, "orelse", mock.leave_If_body()) - def leave_Pass(self, original_node: cst.Pass) -> None: - mock.leave_Pass() - object.__setattr__(original_node, "leave_Pass", True) + def leave_If(self, original_node: cst.If) -> None: + object.__setattr__( + original_node, "whitespace_before_test", mock.leave_If() + ) - module = visit_batched(parse_module("pass"), [Batchable()]) - pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] - - # Check properties were set - self.assertEqual(object.__getattribute__(pass_, "visit_Pass"), True) - self.assertEqual(object.__getattribute__(pass_, "leave_Pass"), True) - self.assertEqual(object.__getattribute__(pass_, "visit_Pass_semicolon"), True) - self.assertEqual(object.__getattribute__(pass_, "leave_Pass_semicolon"), True) + module = visit_batched(parse_module("if True: pass"), [Batchable()]) + if_ = cast(cst.SimpleStatementLine, module.body[0]) # Check that each visitor was only called once - mock.visit_Pass.assert_called_once() - mock.leave_Pass.assert_called_once() - mock.visit_Pass_semicolon.assert_called_once() - mock.leave_Pass_semicolon.assert_called_once() + mock.visit_If.assert_called_once() + mock.leave_If.assert_called_once() + mock.visit_If_body.assert_called_once() + mock.leave_If_body.assert_called_once() + + # Check properties were set + self.assertEqual(object.__getattribute__(if_, "test"), mock.visit_If()) + self.assertEqual( + object.__getattribute__(if_, "leading_lines"), mock.visit_If_body() + ) + self.assertEqual(object.__getattribute__(if_, "orelse"), mock.leave_If_body()) + self.assertEqual( + object.__getattribute__(if_, "whitespace_before_test"), mock.leave_If() + ) From cafbfac15092646dc555c13fbf7201f664af523b Mon Sep 17 00:00:00 2001 From: Sehyo Chang Date: Sun, 16 Jan 2022 10:46:54 -0800 Subject: [PATCH 192/632] change pyo3 as optional dependency in native Python Parser (#598) --- .github/workflows/build.yml | 6 + native/libcst/Cargo.toml | 5 +- native/libcst/src/lib.rs | 1 + native/libcst/src/nodes/expression.rs | 356 ++++++++++++++++---------- native/libcst/src/nodes/module.rs | 4 +- native/libcst/src/nodes/op.rs | 135 +++++----- native/libcst/src/nodes/statement.rs | 203 +++++++++------ native/libcst/src/nodes/whitespace.rs | 22 +- native/libcst/src/parser/errors.rs | 112 ++++---- 9 files changed, 507 insertions(+), 337 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 68359560..7e93f9e6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -260,6 +260,12 @@ jobs: with: command: 
test args: --manifest-path=native/Cargo.toml --release + - name: test without python + if: matrix.os == 'ubuntu-latest' + uses: actions-rs/cargo@v1 + with: + command: test + args: --manifest-path=native/Cargo.toml --release --no-default-features - name: clippy uses: actions-rs/clippy-check@v1 with: diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 2247abf3..b0a9b9df 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -23,12 +23,13 @@ path = "src/bin.rs" # # Once https://github.com/PyO3/pyo3/pull/1123 lands, it may be better to use # `-Zextra-link-arg` for this instead. -default = ["pyo3/extension-module"] +default = ["py"] +py = ["pyo3","pyo3/extension-module"] trace = ["peg/trace"] [dependencies] paste = "1.0.4" -pyo3 = "0.14.4" +pyo3 = { version = "0.14.4", optional = true } thiserror = "1.0.23" peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "4b146b4b78a80c07e43d7ace2d97f65bfde279a8" } chic = "1.2.2" diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index e4355997..997ac0bc 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -16,6 +16,7 @@ pub use nodes::*; mod parser; use parser::{ParserError, Result}; +#[cfg(feature = "py")] pub mod py; pub fn tokenize(text: &str) -> Result> { diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index c55e327d..53801c46 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -18,12 +18,14 @@ use crate::{ Token, }, }; -use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode}; -use pyo3::{types::PyModule, IntoPy}; +#[cfg(feature = "py")] +use libcst_derive::IntoPy; +use libcst_derive::{Codegen, Inflate, ParenthesizedNode}; type TokenRef<'a> = Rc>; -#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Default, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Parameters<'a> { pub params: Vec>, pub star_arg: Option>, @@ -57,7 +59,8 @@ impl<'a> Inflate<'a> for Parameters<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum StarArg<'a> { Star(ParamStar<'a>), Param(Box>), @@ -117,7 +120,8 @@ impl<'a> Codegen<'a> for Parameters<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ParamSlash<'a> { pub comma: Option>, } @@ -140,7 +144,8 @@ impl<'a> Inflate<'a> for ParamSlash<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ParamStar<'a> { pub comma: Comma<'a>, } @@ -159,7 +164,8 @@ impl<'a> Inflate<'a> for ParamStar<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, Eq, PartialEq, Default, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Name<'a> { pub value: &'a str, pub lpar: Vec>, @@ -182,7 +188,8 @@ impl<'a> Codegen<'a> for Name<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Param<'a> { pub name: Name<'a>, pub annotation: Option>, @@ -274,7 +281,8 @@ impl<'a> Param<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", 
derive(IntoPy))] pub struct Arg<'a> { pub value: Expression<'a>, pub keyword: Option>, @@ -337,7 +345,8 @@ impl<'a> WithComma<'a> for Arg<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct LeftParen<'a> { /// Any space that appears directly after this left parenthesis. pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -362,7 +371,8 @@ impl<'a> Inflate<'a> for LeftParen<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct RightParen<'a> { /// Any space that appears directly before this right parenthesis. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -388,7 +398,8 @@ impl<'a> Inflate<'a> for RightParen<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum Expression<'a> { Name(Name<'a>), Ellipsis(Ellipsis<'a>), @@ -421,7 +432,8 @@ pub enum Expression<'a> { NamedExpr(NamedExpr<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Ellipsis<'a> { pub lpar: Vec>, pub rpar: Vec>, @@ -442,7 +454,8 @@ impl<'a> Inflate<'a> for Ellipsis<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Integer<'a> { /// A string representation of the integer, such as ``"100000"`` or /// ``"100_000"``. @@ -467,7 +480,8 @@ impl<'a> Inflate<'a> for Integer<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Float<'a> { /// A string representation of the floating point number, such as ```"0.05"``, /// ``".050"``, or ``"5e-2"``. 
@@ -492,7 +506,8 @@ impl<'a> Inflate<'a> for Float<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Imaginary<'a> { /// A string representation of the complex number, such as ``"2j"`` pub value: &'a str, @@ -516,7 +531,8 @@ impl<'a> Inflate<'a> for Imaginary<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Comparison<'a> { pub left: Box>, pub comparisons: Vec>, @@ -544,7 +560,8 @@ impl<'a> Inflate<'a> for Comparison<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct UnaryOperation<'a> { pub operator: UnaryOp<'a>, pub expression: Box>, @@ -571,7 +588,8 @@ impl<'a> Inflate<'a> for UnaryOperation<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct BinaryOperation<'a> { pub left: Box>, pub operator: BinaryOp<'a>, @@ -601,7 +619,8 @@ impl<'a> Inflate<'a> for BinaryOperation<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct BooleanOperation<'a> { pub left: Box>, pub operator: BooleanOp<'a>, @@ -631,7 +650,8 @@ impl<'a> Inflate<'a> for BooleanOperation<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Call<'a> { pub func: Box>, pub args: Vec>, @@ -688,7 +708,8 @@ impl<'a> Codegen<'a> for Call<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Attribute<'a> { pub value: Box>, pub attr: Name<'a>, @@ -719,7 +740,8 @@ impl<'a> Codegen<'a> for Attribute<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum NameOrAttribute<'a> { N(Name<'a>), A(Attribute<'a>), @@ -734,7 +756,8 @@ impl<'a> std::convert::From> for Expression<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ComparisonTarget<'a> { pub operator: CompOp<'a>, pub comparator: Expression<'a>, @@ -755,7 +778,8 @@ impl<'a> Inflate<'a> for ComparisonTarget<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct StarredElement<'a> { pub value: Box>, pub comma: Option>, @@ -812,33 +836,6 @@ pub enum Element<'a> { Starred(StarredElement<'a>), } -// TODO: this could be a derive helper attribute to override the python class name -impl<'a> IntoPy for Element<'a> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - match self { - Self::Starred(s) => s.into_py(py), - Self::Simple { value, comma } => { - let libcst = PyModule::import(py, "libcst").expect("libcst cannot be 
imported"); - let kwargs = [ - Some(("value", value.into_py(py))), - comma.map(|x| ("comma", x.into_py(py))), - ] - .iter() - .filter(|x| x.is_some()) - .map(|x| x.as_ref().unwrap()) - .collect::>() - .into_py_dict(py); - libcst - .getattr("Element") - .expect("no Element found in libcst") - .call((), Some(kwargs)) - .expect("conversion failed") - .into() - } - } - } -} - impl<'a> Element<'a> { fn codegen( &self, @@ -897,7 +894,8 @@ impl<'a> std::convert::From> for Element<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Tuple<'a> { pub elements: Vec>, pub lpar: Vec>, @@ -937,7 +935,8 @@ impl<'a> Codegen<'a> for Tuple<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct GeneratorExp<'a> { pub elt: Box>, pub for_in: Box>, @@ -964,7 +963,8 @@ impl<'a> Inflate<'a> for GeneratorExp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ListComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -997,7 +997,8 @@ impl<'a> Inflate<'a> for ListComp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct LeftSquareBracket<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1020,7 +1021,8 @@ impl<'a> Inflate<'a> for LeftSquareBracket<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct RightSquareBracket<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1043,7 +1045,8 @@ impl<'a> Inflate<'a> for RightSquareBracket<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct SetComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -1076,7 +1079,8 @@ impl<'a> Codegen<'a> for SetComp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct DictComp<'a> { pub key: Box>, pub value: Box>, @@ -1127,7 +1131,8 @@ impl<'a> Codegen<'a> for DictComp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct LeftCurlyBrace<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1150,7 +1155,8 @@ impl<'a> Codegen<'a> for LeftCurlyBrace<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct RightCurlyBrace<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1173,13 +1179,8 @@ impl<'a> Codegen<'a> for RightCurlyBrace<'a> { } } -impl<'a> pyo3::conversion::IntoPy for Box> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - (*self).into_py(py) - } -} - -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub 
struct CompFor<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1254,7 +1255,8 @@ impl<'a> Inflate<'a> for CompFor<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Asynchronous<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, } @@ -1266,7 +1268,8 @@ impl<'a> Codegen<'a> for Asynchronous<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct CompIf<'a> { pub test: Expression<'a>, pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -1299,7 +1302,8 @@ impl<'a> Inflate<'a> for CompIf<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct List<'a> { pub elements: Vec>, pub lbracket: LeftSquareBracket<'a>, @@ -1341,7 +1345,8 @@ impl<'a> Codegen<'a> for List<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Set<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1382,7 +1387,8 @@ impl<'a> Codegen<'a> for Set<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Dict<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1437,46 +1443,6 @@ pub enum DictElement<'a> { Starred(StarredDictElement<'a>), } -// TODO: this could be a derive helper attribute to override the python class name -impl<'a> IntoPy for DictElement<'a> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - match self { - Self::Starred(s) => s.into_py(py), - Self::Simple { - key, - value, - comma, - whitespace_after_colon, - whitespace_before_colon, - .. 
- } => { - let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); - let kwargs = [ - Some(("key", key.into_py(py))), - Some(("value", value.into_py(py))), - Some(( - "whitespace_before_colon", - whitespace_before_colon.into_py(py), - )), - Some(("whitespace_after_colon", whitespace_after_colon.into_py(py))), - comma.map(|x| ("comma", x.into_py(py))), - ] - .iter() - .filter(|x| x.is_some()) - .map(|x| x.as_ref().unwrap()) - .collect::>() - .into_py_dict(py); - libcst - .getattr("DictElement") - .expect("no Element found in libcst") - .call((), Some(kwargs)) - .expect("conversion failed") - .into() - } - } - } -} - impl<'a> DictElement<'a> { pub fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result { Ok(match self { @@ -1574,7 +1540,8 @@ impl<'a> WithComma<'a> for DictElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct StarredDictElement<'a> { pub value: Expression<'a>, pub comma: Option>, @@ -1611,13 +1578,15 @@ impl<'a> Codegen<'a> for StarredDictElement<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum BaseSlice<'a> { Index(Index<'a>), Slice(Slice<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Index<'a> { pub value: Expression<'a>, } @@ -1635,11 +1604,12 @@ impl<'a> Codegen<'a> for Index<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Slice<'a> { - #[no_py_default] + #[cfg_attr(feature = "py", no_py_default)] pub lower: Option>, - #[no_py_default] + #[cfg_attr(feature = "py", no_py_default)] pub upper: Option>, pub step: Option>, pub first_colon: Colon<'a>, @@ -1677,7 +1647,8 @@ impl<'a> Codegen<'a> for Slice<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct SubscriptElement<'a> { pub slice: BaseSlice<'a>, pub comma: Option>, @@ -1700,7 +1671,8 @@ impl<'a> Codegen<'a> for SubscriptElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Subscript<'a> { pub value: Box>, pub slice: Vec>, @@ -1747,7 +1719,8 @@ impl<'a> Codegen<'a> for Subscript<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct IfExp<'a> { pub test: Box>, pub body: Box>, @@ -1806,7 +1779,8 @@ impl<'a> Codegen<'a> for IfExp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Lambda<'a> { pub params: Box>, pub body: Box>, @@ -1853,7 +1827,8 @@ impl<'a> Codegen<'a> for Lambda<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct From<'a> { pub item: Expression<'a>, pub whitespace_before_from: Option>, @@ -1891,7 +1866,8 @@ impl<'a> Inflate<'a> for From<'a> { } 
#[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum YieldValue<'a> { Expression(Expression<'a>), From(From<'a>), @@ -1919,13 +1895,8 @@ impl<'a> YieldValue<'a> { } } -impl<'a> pyo3::conversion::IntoPy for Box> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - (*self).into_py(py) - } -} - -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Yield<'a> { pub value: Option>>, pub lpar: Vec>, @@ -1967,7 +1938,8 @@ impl<'a> Codegen<'a> for Yield<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Await<'a> { pub expression: Box>, pub lpar: Vec>, @@ -2001,7 +1973,8 @@ impl<'a> Codegen<'a> for Await<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum String<'a> { Simple(SimpleString<'a>), Concatenated(ConcatenatedString<'a>), @@ -2018,7 +1991,8 @@ impl<'a> std::convert::From> for Expression<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ConcatenatedString<'a> { pub left: Box>, pub right: Box>, @@ -2055,7 +2029,8 @@ impl<'a> Codegen<'a> for ConcatenatedString<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct SimpleString<'a> { /// The texual representation of the string, including quotes, prefix /// characters, and any escape characters present in the original source code, @@ -2079,7 +2054,8 @@ impl<'a> Codegen<'a> for SimpleString<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct FormattedStringText<'a> { pub value: &'a str, } @@ -2096,7 +2072,8 @@ impl<'a> Codegen<'a> for FormattedStringText<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct FormattedStringExpression<'a> { pub expression: Expression<'a>, pub conversion: Option<&'a str>, @@ -2154,13 +2131,15 @@ impl<'a> Codegen<'a> for FormattedStringExpression<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum FormattedStringContent<'a> { Text(FormattedStringText<'a>), Expression(FormattedStringExpression<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct FormattedString<'a> { pub parts: Vec>, pub start: &'a str, @@ -2190,7 +2169,8 @@ impl<'a> Codegen<'a> for FormattedString<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct 
NamedExpr<'a> { pub target: Box>, pub value: Box>, @@ -2232,3 +2212,97 @@ impl<'a> Inflate<'a> for NamedExpr<'a> { Ok(self) } } + +#[cfg(feature = "py")] +mod py { + + use pyo3::{types::PyModule, IntoPy}; + + use super::*; + use crate::OrElse; + + // TODO: this could be a derive helper attribute to override the python class name + impl<'a> IntoPy for Element<'a> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + Self::Starred(s) => s.into_py(py), + Self::Simple { value, comma } => { + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let kwargs = [ + Some(("value", value.into_py(py))), + comma.map(|x| ("comma", x.into_py(py))), + ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py); + libcst + .getattr("Element") + .expect("no Element found in libcst") + .call((), Some(kwargs)) + .expect("conversion failed") + .into() + } + } + } + } + + // TODO: this could be a derive helper attribute to override the python class name + impl<'a> IntoPy for DictElement<'a> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + Self::Starred(s) => s.into_py(py), + Self::Simple { + key, + value, + comma, + whitespace_after_colon, + whitespace_before_colon, + .. + } => { + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let kwargs = [ + Some(("key", key.into_py(py))), + Some(("value", value.into_py(py))), + Some(( + "whitespace_before_colon", + whitespace_before_colon.into_py(py), + )), + Some(("whitespace_after_colon", whitespace_after_colon.into_py(py))), + comma.map(|x| ("comma", x.into_py(py))), + ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py); + libcst + .getattr("DictElement") + .expect("no Element found in libcst") + .call((), Some(kwargs)) + .expect("conversion failed") + .into() + } + } + } + } + + impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } + } + + impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } + } + + impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } + } +} diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index 03c6afb5..21e7e66c 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -16,13 +16,15 @@ use crate::{ }, tokenizer::whitespace_parser::Config, }; +#[cfg(feature = "py")] use libcst_derive::IntoPy; use super::traits::{Inflate, Result, WithLeadingLines}; type TokenRef<'a> = Rc>; -#[derive(Debug, Eq, PartialEq, IntoPy)] +#[derive(Debug, Eq, PartialEq)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Module<'a> { pub body: Vec>, pub header: Vec>, diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index 48b9839f..afa97d5d 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -13,18 +13,20 @@ use crate::{ Token, }, }; +#[cfg(feature = "py")] use libcst_derive::IntoPy; type TokenRef<'a> = Rc>; -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Semicolon<'a> { /// Any space that appears directly before this semicolon. 
pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this semicolon. pub whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] pub(crate) tok: TokenRef<'a>, } @@ -48,14 +50,15 @@ impl<'a> Inflate<'a> for Semicolon<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Comma<'a> { /// Any space that appears directly before this comma. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this comma. pub whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] pub(crate) tok: TokenRef<'a>, } @@ -91,14 +94,15 @@ impl<'a> Comma<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct AssignEqual<'a> { /// Any space that appears directly before this equal sign. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this equal sign. pub whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] pub(crate) tok: TokenRef<'a>, } @@ -124,14 +128,15 @@ impl<'a> Inflate<'a> for AssignEqual<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Dot<'a> { /// Any space that appears directly before this dot. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this dot. pub whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] pub(crate) tok: TokenRef<'a>, } @@ -169,7 +174,8 @@ impl<'a> Dot<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ImportStar {} impl<'a> Codegen<'a> for ImportStar { @@ -184,26 +190,27 @@ impl<'a> Inflate<'a> for ImportStar { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum UnaryOp<'a> { Plus { whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Minus { whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitInvert { whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Not { whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, } @@ -276,18 +283,19 @@ impl<'a> Inflate<'a> for UnaryOp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum BooleanOp<'a> { And { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Or { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, } @@ -349,84 +357,85 @@ impl<'a> Inflate<'a> for BooleanOp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = 
"py", derive(IntoPy))] pub enum BinaryOp<'a> { Add { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Subtract { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Multiply { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Divide { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, FloorDivide { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Modulo { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Power { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, LeftShift { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, RightShift { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitOr { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitAnd { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitXor { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, MatrixMultiply { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, } @@ -708,72 +717,73 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum CompOp<'a> { LessThan { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, GreaterThan { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, LessThanEqual { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, GreaterThanEqual { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, Equal { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = 
"py", skip_py)] tok: TokenRef<'a>, }, NotEqual { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, In { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, NotIn { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_between: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] not_tok: TokenRef<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] in_tok: TokenRef<'a>, }, Is { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, IsNot { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_between: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] is_tok: TokenRef<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] not_tok: TokenRef<'a>, }, } @@ -1029,12 +1039,13 @@ impl<'a> Inflate<'a> for CompOp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Colon<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] pub(crate) tok: TokenRef<'a>, } @@ -1060,84 +1071,85 @@ impl<'a> Codegen<'a> for Colon<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum AugOp<'a> { AddAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, SubtractAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, MultiplyAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, MatrixMultiplyAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, DivideAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, ModuloAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitAndAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitOrAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, BitXorAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, LeftShiftAssign { 
whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, RightShiftAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, PowerAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, FloorDivideAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] tok: TokenRef<'a>, }, } @@ -1419,7 +1431,8 @@ impl<'a> Codegen<'a> for AugOp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct BitOr<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 21725343..93f540d0 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -25,12 +25,15 @@ use crate::{ }, LeftCurlyBrace, LeftSquareBracket, RightCurlyBrace, RightSquareBracket, }; -use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode}; +#[cfg(feature = "py")] +use libcst_derive::IntoPy; +use libcst_derive::{Codegen, Inflate, ParenthesizedNode}; type TokenRef<'a> = Rc>; #[allow(clippy::large_enum_variant)] -#[derive(Debug, Eq, PartialEq, Clone, Inflate, Codegen, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone, Inflate, Codegen)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum Statement<'a> { Simple(SimpleStatementLine<'a>), Compound(CompoundStatement<'a>), @@ -45,7 +48,8 @@ impl<'a> WithLeadingLines<'a> for Statement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] +#[cfg_attr(feature = "py", derive(IntoPy))] #[allow(clippy::large_enum_variant)] pub enum CompoundStatement<'a> { FunctionDef(FunctionDef<'a>), @@ -75,13 +79,15 @@ impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum Suite<'a> { IndentedBlock(IndentedBlock<'a>), SimpleStatementSuite(SimpleStatementSuite<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct IndentedBlock<'a> { /// Sequence of statements belonging to this indented block. pub body: Vec>, @@ -171,7 +177,8 @@ impl<'a> Inflate<'a> for IndentedBlock<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct SimpleStatementSuite<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. @@ -225,7 +232,8 @@ impl<'a> Codegen<'a> for SimpleStatementSuite<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct SimpleStatementLine<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. 
@@ -267,7 +275,8 @@ impl<'a> Inflate<'a> for SimpleStatementLine<'a> { } #[allow(dead_code, clippy::large_enum_variant)] -#[derive(Debug, Eq, PartialEq, Clone, Codegen, Inflate, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone, Codegen, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum SmallStatement<'a> { Pass(Pass<'a>), Break(Break<'a>), @@ -308,7 +317,8 @@ impl<'a> SmallStatement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Pass<'a> { pub semicolon: Option>, } @@ -330,7 +340,8 @@ impl<'a> Inflate<'a> for Pass<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Break<'a> { pub semicolon: Option>, } @@ -352,7 +363,8 @@ impl<'a> Inflate<'a> for Break<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Continue<'a> { pub semicolon: Option>, } @@ -374,7 +386,8 @@ impl<'a> Inflate<'a> for Continue<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Expr<'a> { pub value: Expression<'a>, pub semicolon: Option>, @@ -398,7 +411,8 @@ impl<'a> Inflate<'a> for Expr<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Assign<'a> { pub targets: Vec>, pub value: Expression<'a>, @@ -432,7 +446,8 @@ impl<'a> Assign<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct AssignTarget<'a> { pub target: AssignTargetExpression<'a>, pub whitespace_before_equal: SimpleWhitespace<'a>, @@ -464,7 +479,8 @@ impl<'a> Inflate<'a> for AssignTarget<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum AssignTargetExpression<'a> { Name(Name<'a>), Attribute(Attribute<'a>), @@ -474,7 +490,8 @@ pub enum AssignTargetExpression<'a> { Subscript(Subscript<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Import<'a> { pub names: Vec>, pub semicolon: Option>, @@ -517,9 +534,10 @@ impl<'a> Import<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ImportFrom<'a> { - #[no_py_default] + #[cfg_attr(feature = "py", no_py_default)] pub module: Option>, pub names: ImportNames<'a>, pub relative: Vec>, @@ -632,7 +650,8 @@ impl<'a> ImportFrom<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ImportAlias<'a> { pub name: NameOrAttribute<'a>, pub asname: Option>, @@ -667,7 +686,8 @@ impl<'a> Codegen<'a> for ImportAlias<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct AsName<'a> { pub name: AssignTargetExpression<'a>, pub whitespace_before_as: ParenthesizableWhitespace<'a>, @@ -700,7 +720,8 @@ impl<'a> Inflate<'a> for 
AsName<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum ImportNames<'a> { Star(ImportStar), Aliases(Vec>), @@ -722,7 +743,8 @@ impl<'a> Codegen<'a> for ImportNames<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct FunctionDef<'a> { pub name: Name<'a>, pub params: Parameters<'a>, @@ -847,7 +869,8 @@ impl<'a> Inflate<'a> for FunctionDef<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Decorator<'a> { pub decorator: Expression<'a>, pub leading_lines: Vec>, @@ -889,13 +912,8 @@ impl<'a> Inflate<'a> for Decorator<'a> { } } -impl<'a> pyo3::conversion::IntoPy for Box> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - (*self).into_py(py) - } -} - -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct If<'a> { /// The expression that, when evaluated, should give us a truthy value pub test: Expression<'a>, @@ -916,7 +934,7 @@ pub struct If<'a> { pub whitespace_after_test: SimpleWhitespace<'a>, /// Signifies if this instance represents an ``elif`` or an ``if`` block. - #[skip_py] + #[cfg_attr(feature = "py", skip_py)] pub is_elif: bool, pub(crate) if_tok: TokenRef<'a>, @@ -964,13 +982,15 @@ impl<'a> Inflate<'a> for If<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum OrElse<'a> { Elif(If<'a>), Else(Else<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Else<'a> { pub body: Suite<'a>, /// Sequence of empty lines appearing before this compound statement line. 
@@ -1013,7 +1033,8 @@ impl<'a> Inflate<'a> for Else<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Annotation<'a> { pub annotation: Expression<'a>, pub whitespace_before_indicator: Option>, @@ -1053,7 +1074,8 @@ impl<'a> Inflate<'a> for Annotation<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct AnnAssign<'a> { pub target: AssignTargetExpression<'a>, pub annotation: Annotation<'a>, @@ -1098,7 +1120,8 @@ impl<'a> AnnAssign<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Return<'a> { pub value: Option>, pub whitespace_after_return: Option>, @@ -1149,7 +1172,8 @@ impl<'a> Return<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Assert<'a> { pub test: Expression<'a>, pub msg: Option>, @@ -1200,7 +1224,8 @@ impl<'a> Assert<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Raise<'a> { pub exc: Option>, pub cause: Option>, @@ -1262,7 +1287,8 @@ impl<'a> Raise<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct NameItem<'a> { pub name: Name<'a>, pub comma: Option>, @@ -1287,7 +1313,8 @@ impl<'a> NameItem<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Global<'a> { pub names: Vec>, pub whitespace_after_global: SimpleWhitespace<'a>, @@ -1327,7 +1354,8 @@ impl<'a> Global<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Nonlocal<'a> { pub names: Vec>, pub whitespace_after_nonlocal: SimpleWhitespace<'a>, @@ -1367,7 +1395,8 @@ impl<'a> Nonlocal<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct For<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1459,7 +1488,8 @@ impl<'a> Inflate<'a> for For<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct While<'a> { pub test: Expression<'a>, pub body: Suite<'a>, @@ -1512,7 +1542,8 @@ impl<'a> Inflate<'a> for While<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ClassDef<'a> { pub name: Name<'a>, pub body: Suite<'a>, @@ -1618,7 +1649,8 @@ impl<'a> ClassDef<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Finally<'a> { pub body: Suite<'a>, pub leading_lines: Vec>, @@ -1658,7 +1690,8 @@ impl<'a> Inflate<'a> for Finally<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ExceptHandler<'a> { pub body: Suite<'a>, pub r#type: Option>, @@ -1718,7 +1751,8 @@ impl<'a> Inflate<'a> for ExceptHandler<'a> { } } 
-#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ExceptStarHandler<'a> { pub body: Suite<'a>, pub r#type: Expression<'a>, @@ -1780,7 +1814,8 @@ impl<'a> Inflate<'a> for ExceptStarHandler<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Try<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -1832,7 +1867,8 @@ impl<'a> Inflate<'a> for Try<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct TryStar<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -1884,7 +1920,8 @@ impl<'a> Inflate<'a> for TryStar<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct AugAssign<'a> { pub target: AssignTargetExpression<'a>, pub operator: AugOp<'a>, @@ -1920,7 +1957,8 @@ impl<'a> AugAssign<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct WithItem<'a> { pub item: Expression<'a>, pub asname: Option>, @@ -1961,7 +1999,8 @@ impl<'a> WithComma<'a> for WithItem<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct With<'a> { pub items: Vec>, pub body: Suite<'a>, @@ -2075,7 +2114,8 @@ impl<'a> Inflate<'a> for With<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum DelTargetExpression<'a> { Name(Name<'a>), Attribute(Attribute<'a>), @@ -2104,7 +2144,8 @@ impl<'a> std::convert::From> for Element<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Del<'a> { pub target: DelTargetExpression<'a>, pub whitespace_after_del: SimpleWhitespace<'a>, @@ -2140,7 +2181,8 @@ impl<'a> Del<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Match<'a> { pub subject: Expression<'a>, pub cases: Vec>, @@ -2215,7 +2257,8 @@ impl<'a> Inflate<'a> for Match<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchCase<'a> { pub pattern: MatchPattern<'a>, pub guard: Option>, @@ -2279,7 +2322,8 @@ impl<'a> Inflate<'a> for MatchCase<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, Codegen, Inflate, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum MatchPattern<'a> { Value(MatchValue<'a>), Singleton(MatchSingleton<'a>), @@ -2290,7 +2334,8 @@ pub enum MatchPattern<'a> { Or(Box>), } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchValue<'a> { pub value: Expression<'a>, } @@ -2328,7 +2373,8 @@ impl<'a> Inflate<'a> for MatchValue<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, 
PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchSingleton<'a> { pub value: Name<'a>, } @@ -2367,13 +2413,15 @@ impl<'a> Inflate<'a> for MatchSingleton<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, Codegen, Inflate, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum MatchSequence<'a> { MatchList(MatchList<'a>), MatchTuple(MatchTuple<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchList<'a> { pub patterns: Vec>, pub lbracket: Option>, @@ -2418,7 +2466,8 @@ impl<'a> Inflate<'a> for MatchList<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchTuple<'a> { pub patterns: Vec>, pub lpar: Vec>, @@ -2456,7 +2505,8 @@ impl<'a> Inflate<'a> for MatchTuple<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum StarrableMatchSequenceElement<'a> { Simple(MatchSequenceElement<'a>), Starred(MatchStar<'a>), @@ -2491,7 +2541,8 @@ impl<'a> WithComma<'a> for StarrableMatchSequenceElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchSequenceElement<'a> { pub value: MatchPattern<'a>, pub comma: Option>, @@ -2531,7 +2582,8 @@ impl<'a> WithComma<'a> for MatchSequenceElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchStar<'a> { pub name: Option>, pub comma: Option>, @@ -2584,7 +2636,8 @@ impl<'a> WithComma<'a> for MatchStar<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchMapping<'a> { pub elements: Vec>, pub rest: Option>, @@ -2648,7 +2701,8 @@ impl<'a> Inflate<'a> for MatchMapping<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchMappingElement<'a> { pub key: Expression<'a>, pub pattern: MatchPattern<'a>, @@ -2702,7 +2756,8 @@ impl<'a> WithComma<'a> for MatchMappingElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchClass<'a> { pub cls: NameOrAttribute<'a>, pub patterns: Vec>, @@ -2778,7 +2833,8 @@ impl<'a> Inflate<'a> for MatchClass<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchKeywordElement<'a> { pub key: Name<'a>, pub pattern: MatchPattern<'a>, @@ -2831,7 +2887,8 @@ impl<'a> WithComma<'a> for MatchKeywordElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchAs<'a> { pub pattern: Option>, pub name: Option>, @@ -2882,7 +2939,8 @@ 
impl<'a> Inflate<'a> for MatchAs<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchOrElement<'a> { pub pattern: MatchPattern<'a>, pub separator: Option>, @@ -2906,7 +2964,8 @@ impl<'a> Inflate<'a> for MatchOrElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct MatchOr<'a> { pub patterns: Vec>, pub lpar: Vec>, diff --git a/native/libcst/src/nodes/whitespace.rs b/native/libcst/src/nodes/whitespace.rs index 2c99a048..13c4c8c4 100644 --- a/native/libcst/src/nodes/whitespace.rs +++ b/native/libcst/src/nodes/whitespace.rs @@ -3,11 +3,13 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +#[cfg(feature = "py")] use libcst_derive::IntoPy; use super::{Codegen, CodegenState}; -#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Default, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct SimpleWhitespace<'a>(pub &'a str); impl<'a> Codegen<'a> for SimpleWhitespace<'a> { @@ -16,7 +18,8 @@ impl<'a> Codegen<'a> for SimpleWhitespace<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Comment<'a>(pub &'a str); impl<'a> Default for Comment<'a> { @@ -31,7 +34,8 @@ impl<'a> Codegen<'a> for Comment<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Default, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct Newline<'a>(pub Option<&'a str>, pub Fakeness); #[derive(Debug, PartialEq, Eq, Clone)] @@ -59,7 +63,8 @@ impl<'a> Codegen<'a> for Newline<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Default, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct TrailingWhitespace<'a> { pub whitespace: SimpleWhitespace<'a>, pub comment: Option>, @@ -76,7 +81,8 @@ impl<'a> Codegen<'a> for TrailingWhitespace<'a> { } } -#[derive(Debug, Clone, PartialEq, Eq, IntoPy)] +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct EmptyLine<'a> { pub indent: bool, pub whitespace: SimpleWhitespace<'a>, @@ -124,7 +130,8 @@ impl<'a> EmptyLine<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Default, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub struct ParenthesizedWhitespace<'a> { pub first_line: TrailingWhitespace<'a>, pub empty_lines: Vec>, @@ -145,7 +152,8 @@ impl<'a> Codegen<'a> for ParenthesizedWhitespace<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] +#[derive(Debug, Eq, PartialEq, Clone)] +#[cfg_attr(feature = "py", derive(IntoPy))] pub enum ParenthesizableWhitespace<'a> { SimpleWhitespace(SimpleWhitespace<'a>), ParenthesizedWhitespace(ParenthesizedWhitespace<'a>), diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 44573248..6438ffbe 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -3,9 +3,6 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree -use pyo3::types::{IntoPyDict, PyModule}; -use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; - use 
crate::parser::grammar::TokVec; use crate::tokenizer::whitespace_parser::WhitespaceError; use crate::tokenizer::TokError; @@ -28,56 +25,65 @@ pub enum ParserError<'a> { OperatorError, } -impl<'a> From> for PyErr { - fn from(e: ParserError) -> Self { - Python::with_gil(|py| { - let lines = match &e { - ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => { - text.lines().collect::>() - } - _ => vec![""], - }; - let (line, col) = match &e { - ParserError::ParserError(err, ..) => { - (err.location.start_pos.line, err.location.start_pos.column) - } - _ => (0, 0), - }; - let kwargs = [ - ("message", e.to_string().into_py(py)), - ("lines", lines.into_py(py)), - ("raw_line", line.into_py(py)), - ("raw_column", col.into_py(py)), +#[cfg(feature = "py")] +mod py_error { + + use pyo3::types::{IntoPyDict, PyModule}; + use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; + + use super::ParserError; + + struct Details { + message: String, + lines: Vec, + raw_line: u32, + raw_column: u32, + } + + impl<'a> From> for PyErr { + fn from(e: ParserError) -> Self { + Python::with_gil(|py| { + let lines = match &e { + ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => { + text.lines().collect::>() + } + _ => vec![""], + }; + let (line, col) = match &e { + ParserError::ParserError(err, ..) => { + (err.location.start_pos.line, err.location.start_pos.column) + } + _ => (0, 0), + }; + let kwargs = [ + ("message", e.to_string().into_py(py)), + ("lines", lines.into_py(py)), + ("raw_line", line.into_py(py)), + ("raw_column", col.into_py(py)), + ] + .into_py_dict(py); + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + PyErr::from_instance( + libcst + .getattr("ParserSyntaxError") + .expect("ParserSyntaxError not found") + .call((), Some(kwargs)) + .expect("failed to instantiate"), + ) + }) + } + } + + impl<'a> PyErrArguments for Details { + fn arguments(self, py: pyo3::Python) -> pyo3::PyObject { + [ + ("message", self.message.into_py(py)), + ("lines", self.lines.into_py(py)), + ("raw_line", self.raw_line.into_py(py)), + ("raw_column", self.raw_column.into_py(py)), ] - .into_py_dict(py); - let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); - PyErr::from_instance( - libcst - .getattr("ParserSyntaxError") - .expect("ParserSyntaxError not found") - .call((), Some(kwargs)) - .expect("failed to instantiate"), - ) - }) - } -} - -struct Details { - message: String, - lines: Vec, - raw_line: u32, - raw_column: u32, -} - -impl<'a> PyErrArguments for Details { - fn arguments(self, py: pyo3::Python) -> pyo3::PyObject { - [ - ("message", self.message.into_py(py)), - ("lines", self.lines.into_py(py)), - ("raw_line", self.raw_line.into_py(py)), - ("raw_column", self.raw_column.into_py(py)), - ] - .into_py_dict(py) - .into_py(py) + .into_py_dict(py) + .into_py(py) + } } } From 91212cd6d77e11b5614516ef40673a5f842c4c22 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Sun, 16 Jan 2022 11:34:25 -0800 Subject: [PATCH 193/632] Full handling for applying type comments to Assign (#599) * Add full support type comment -> PEP 526 conversion Summary: In the previous PR, I added basic support for converting an Assign with a type comment to an AnnAssign, as long as there was only one target. This PR handles all fully PEP 484 compliant cases: - multiple assignments - multiple elements in the LHS l-value We cannot handle arity errors because there's no way to do it. 
And we don't try to handle the ambiguous case of multiple assignments with mismatched arities (PEP 484 isn't super clear on which LHS is supposed to pick up the type, we are conservative here). The ambiguous case is probably very uncommon in real code anyway, multiple assignment is not a widely used feature. Test Plan: There are new test cases covering: - multiple elements in the LHS - multiple assignment - both of the above together - semicolon expansion, which is handled differently in the cases where we have to add type declarations - new error cases: - mismatched arity in both directions on one assignment - mismatched arity in multiple assignment ``` > python -m unittest libcst.codemod.commands.tests.test_convert_type_comments ..... ---------------------------------------------------------------------- Ran 5 tests in 0.150s OK ``` --- .../codemod/commands/convert_type_comments.py | 269 ++++++++++++++---- .../tests/test_convert_type_comments.py | 101 +++++-- 2 files changed, 304 insertions(+), 66 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 337904d2..a786f21a 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -7,7 +7,9 @@ import ast import builtins import functools import sys -from typing import Optional, Set, Union +from typing import List, Optional, Set, Tuple, Union + +from typing_extensions import TypeAlias import libcst as cst from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand @@ -27,8 +29,8 @@ def _ast_for_node(node: cst.CSTNode) -> ast.Module: return ast.parse(code, type_comments=True) -def _simple_statement_type_comment( - node: cst.SimpleStatementLine, +def _statement_type_comment( + node: Union[cst.SimpleStatementLine, cst.For], ) -> Optional[str]: return _ast_for_node(node).body[-1].type_comment @@ -56,29 +58,178 @@ def _convert_annotation(raw: str) -> cst.Annotation: return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) +class _FailedToApplyAnnotation: + pass + + +class _ArityError(Exception): + pass + + +UnpackedBindings: TypeAlias = Union[cst.BaseExpression, List["UnpackedBindings"]] +UnpackedAnnotations: TypeAlias = Union[str, List["UnpackedAnnotations"]] +TargetAnnotationPair: TypeAlias = Tuple[cst.BaseExpression, str] + + +class AnnotationSpreader: + """ + Utilities to help with lining up tuples of types from type comments with + the tuples of values with which they should be associated. + """ + + @staticmethod + def _unparse_annotation( + expression: ast.expr, + ) -> UnpackedAnnotations: + if isinstance(expression, ast.Tuple): + return [ + AnnotationSpreader._unparse_annotation(elt) for elt in expression.elts + ] + else: + return ast.unparse(expression) + + @staticmethod + def unpack_type_comment( + type_comment: str, + ) -> UnpackedAnnotations: + """ + Unpack an ast module expression and unparse it into a recursive + list of strings matching the tuple structure of the type comment. + """ + # pyre-ignore[16]: the ast module stubs do not have full details + annotation_ast = ast.parse(type_comment, "", "eval").body + return AnnotationSpreader._unparse_annotation(annotation_ast) + + @staticmethod + def unpack_target( + target: cst.BaseExpression, + ) -> UnpackedBindings: + """ + Take a (non-function-type) type comment and split it into + components. A type comment body should always be either a single + type or a tuple of types. 
+ + We work with strings for annotations because without detailed scope + analysis that is the safest option for codemods. + """ + if isinstance(target, cst.Tuple): + return [ + AnnotationSpreader.unpack_target(element.value) + for element in target.elements + ] + else: + return target + + @staticmethod + def annotated_bindings( + bindings: UnpackedBindings, + annotations: UnpackedAnnotations, + ) -> List[Tuple[cst.BaseAssignTargetExpression, str]]: + if isinstance(annotations, list): + if isinstance(bindings, list) and len(bindings) == len(annotations): + # The arities match, so we return the flattened result of + # mapping annotated_bindings over each pair. + out: List[Tuple[cst.BaseAssignTargetExpression, str]] = [] + for binding, annotation in zip(bindings, annotations): + out.extend( + AnnotationSpreader.annotated_bindings(binding, annotation) + ) + return out + else: + # Either mismatched lengths, or multi-type and one-target + raise _ArityError() + elif isinstance(bindings, list): + # multi-target and one-type + raise _ArityError() + else: + assert isinstance(bindings, cst.BaseAssignTargetExpression) + return [(bindings, annotations)] + + @staticmethod + def type_declaration( + binding: cst.BaseAssignTargetExpression, + raw_annotation: str, + ) -> cst.AnnAssign: + return cst.AnnAssign( + target=binding, + annotation=_convert_annotation(raw=raw_annotation), + value=None, + ) + + +def convert_Assign( + node: cst.Assign, + type_comment: str, +) -> Union[ + _FailedToApplyAnnotation, + cst.AnnAssign, + List[Union[cst.AnnAssign, cst.Assign]], +]: + # zip the type and target information tother. If there are mismatched + # arities, this is a PEP 484 violation (technically we could use + # logic beyond the PEP to recover some cases as typing.Tuple, but this + # should be rare) so we give up. + try: + annotations = AnnotationSpreader.unpack_type_comment(type_comment) + annotated_targets = [ + AnnotationSpreader.annotated_bindings( + bindings=AnnotationSpreader.unpack_target(target.target), + annotations=annotations, + ) + for target in node.targets + ] + except _ArityError: + return _FailedToApplyAnnotation() + if len(annotated_targets) == 1 and len(annotated_targets[0]) == 1: + # We can convert simple one-target assignments into a single AnnAssign + binding, raw_annotation = annotated_targets[0][0] + return cst.AnnAssign( + target=binding, + annotation=_convert_annotation(raw=raw_annotation), + value=node.value, + semicolon=node.semicolon, + ) + else: + # For multi-target assigns (regardless of whether they are using tuples + # on the LHS or multiple `=` tokens or both), we need to add a type + # declaration per individual LHS target. + type_declarations = [ + AnnotationSpreader.type_declaration(binding, raw_annotation) + for annotated_bindings in annotated_targets + for binding, raw_annotation in annotated_bindings + ] + return [ + *type_declarations, + node, + ] + + class ConvertTypeComments(VisitorBasedCodemodCommand): """ Codemod that converts type comments, as described in https://www.python.org/dev/peps/pep-0484/#type-comments, into PEP 526 annotated assignments. - This is a work in progress: the codemod only currently handles - single-annotation assigns, but it will preserve any type comments - that it does not consume. + This is a work in progress: we intend to also support + function type comments, with statements, and for statements + but those are not yet implemented. 
""" def __init__(self, context: CodemodContext) -> None: - if (sys.version_info.major, sys.version_info.minor) < (3, 8): - # The ast module did not get `type_comments` until Python 3.7. - # In 3.6, we should error than silently running a nonsense codemod. + if (sys.version_info.major, sys.version_info.minor) < (3, 9): + # The ast module did not get `unparse` until Python 3.9, + # or `type_comments` until Python 3.8 # - # NOTE: it is possible to use the typed_ast library for 3.6, but - # this is not a high priority right now. See, e.g., the - # mypy.fastparse module. + # For earlier versions of python, raise early instead of failing + # later. It might be possible to use libcst parsing and the + # typed_ast library to support earlier python versions, but this is + # not a high priority. raise NotImplementedError( - "You are trying to run ConvertTypeComments on a " - + "python version without type comment support. Please " - + "try using python 3.8+ to run your codemod." + "You are trying to run ConvertTypeComments, but libcst " + + "needs to be running with Python 3.9+ in order to " + + "do this. Try using Python 3.9+ to run your codemod. " + + "Note that the target code can be using Python 3.6+, " + + "it is only libcst that needs a new Python version." ) super().__init__(context) @@ -93,27 +244,11 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): comment=None, ) - def _convert_Assign( - self, - assign: cst.Assign, - type_comment: str, - ) -> Union[cst.AnnAssign, cst.Assign]: - if len(assign.targets) != 1: - # this case is not yet implemented, and we short-circuit - # it when handling SimpleStatementLine. - raise RuntimeError("Should not convert multi-target assign") - return cst.AnnAssign( - target=assign.targets[0].target, - annotation=_convert_annotation(raw=type_comment), - value=assign.value, - semicolon=assign.semicolon, - ) - def leave_SimpleStatementLine( self, original_node: cst.SimpleStatementLine, updated_node: cst.SimpleStatementLine, - ) -> cst.SimpleStatementLine: + ) -> Union[cst.SimpleStatementLine, cst.FlattenSentinel]: """ Convert any SimpleStatementLine containing an Assign with a type comment into a one that uses a PEP 526 AnnAssign. @@ -122,25 +257,61 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): assign = updated_node.body[-1] if not isinstance(assign, cst.Assign): # only Assign matters return updated_node - type_comment = _simple_statement_type_comment(original_node) + type_comment = _statement_type_comment(original_node) if type_comment is None: return updated_node - if len(assign.targets) != 1: # multi-target Assign isn't used - return updated_node - target = assign.targets[0].target - if isinstance(target, cst.Tuple): # multi-element Assign isn't handled - return updated_node # At this point have a single-line Assign with a type comment. # Convert it to an AnnAssign and strip the comment. - return updated_node.with_changes( - body=[ - *updated_node.body[:-1], - self._convert_Assign( - assign=assign, - type_comment=type_comment, - ), - ], - trailing_whitespace=self._strip_TrailingWhitespace( - updated_node.trailing_whitespace - ), + converted = convert_Assign( + node=assign, + type_comment=type_comment, ) + if isinstance(converted, _FailedToApplyAnnotation): + # We were unable to consume the type comment, so return the + # original code unchanged. 
+ # TODO: allow stripping the invalid type comments via a flag + return updated_node + elif isinstance(converted, cst.AnnAssign): + # We were able to convert the Assign into an AnnAssign, so + # we can update the node. + return updated_node.with_changes( + body=[*updated_node.body[:-1], converted], + trailing_whitespace=self._strip_TrailingWhitespace( + updated_node.trailing_whitespace, + ), + ) + elif isinstance(converted, list): + # We need to inject two or more type declarations. + # + # In this case, we need to split across multiple lines, and + # this also means we'll spread any multi-statement lines out + # (multi-statement lines are PEP 8 violating anyway). + # + # We still preserve leading lines from before our transform. + new_statements = [ + *( + statement.with_changes( + semicolon=cst.MaybeSentinel.DEFAULT, + ) + for statement in updated_node.body[:-1] + ), + *converted, + ] + if len(new_statements) < 2: + raise RuntimeError("Unreachable code.") + return cst.FlattenSentinel( + [ + updated_node.with_changes( + body=[new_statements[0]], + trailing_whitespace=self._strip_TrailingWhitespace( + updated_node.trailing_whitespace, + ), + ), + *( + cst.SimpleStatementLine(body=[statement]) + for statement in new_statements[1:] + ), + ] + ) + else: + raise RuntimeError(f"Unhandled value {converted}") diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 5e7f96ea..e1e3da8b 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -11,15 +11,15 @@ from libcst.codemod.commands.convert_type_comments import ConvertTypeComments class TestConvertTypeComments(CodemodTest): - maxDiff = 1000 + maxDiff = 1500 TRANSFORM = ConvertTypeComments - def assertCodemod38Plus(self, before: str, after: str) -> None: + def assertCodemod39Plus(self, before: str, after: str) -> None: """ - Assert that the codemod works on Python 3.8+, and that we raise - a NotImplementedError on other python versions. + Assert that the codemod works on Python 3.9+, and that we raise + a NotImplementedError on other Python versions. 
""" - if (sys.version_info.major, sys.version_info.minor) < (3, 8): + if (sys.version_info.major, sys.version_info.minor) < (3, 9): with self.assertRaises(NotImplementedError): super().assertCodemod(before, after) else: @@ -36,15 +36,13 @@ class TestConvertTypeComments(CodemodTest): y: int = 5 z: "typing.Tuple[str, int]" = ('this', 7) """ - self.assertCodemod38Plus(before, after) + self.assertCodemod39Plus(before, after) def test_convert_assignments_in_context(self) -> None: """ Also verify that our matching works regardless of spacing """ before = """ - bar(); baz = 12 # type: int - def foo(): z = ('this', 7) # type: typing.Tuple[str, int] @@ -54,8 +52,6 @@ class TestConvertTypeComments(CodemodTest): self.attr1 = True # type: bool """ after = """ - bar(); baz: int = 12 - def foo(): z: "typing.Tuple[str, int]" = ('this', 7) @@ -64,7 +60,77 @@ class TestConvertTypeComments(CodemodTest): def __init__(self): self.attr1: bool = True """ - self.assertCodemod38Plus(before, after) + self.assertCodemod39Plus(before, after) + + def test_multiple_elements_in_assign_lhs(self) -> None: + before = """ + x, y = [], [] # type: List[int], List[str] + z, w = [], [] # type: (List[int], List[str]) + + a, b, *c = range(5) # type: float, float, List[float] + + d, (e1, e2) = foo() # type: float, (int, str) + """ + after = """ + x: "List[int]" + y: "List[str]" + x, y = [], [] + z: "List[int]" + w: "List[str]" + z, w = [], [] + + a: float + b: float + c: "List[float]" + a, b, *c = range(5) + + d: float + e1: int + e2: str + d, (e1, e2) = foo() + """ + self.assertCodemod39Plus(before, after) + + def test_multiple_assignments(self) -> None: + before = """ + x = y = z = 15 # type: int + + a, b = c, d = 'this', 'that' # type: (str, str) + """ + after = """ + x: int + y: int + z: int + x = y = z = 15 + + a: str + b: str + c: str + d: str + a, b = c, d = 'this', 'that' + """ + self.assertCodemod39Plus(before, after) + + def test_semicolons_with_assignment(self) -> None: + """ + When we convert an Assign to an AnnAssign, preserve + semicolons. But if we have to add separate type declarations, + expand them. + """ + before = """ + foo(); x = 12 # type: int + + bar(); y, z = baz() # type: int, str + """ + after = """ + foo(); x: int = 12 + + bar() + y: int + z: str + y, z = baz() + """ + self.assertCodemod39Plus(before, after) def test_no_change_when_type_comment_unused(self) -> None: before = """ @@ -77,12 +143,13 @@ class TestConvertTypeComments(CodemodTest): # a type comment in an illegal location won't be used print("hello") # type: None - # We currently cannot handle multiple-target assigns. - # Make sure we won't strip those type comments. 
- x, y, z = [], [], [] # type: List[int], List[int], List[str] - x, y, z = [], [], [] # type: (List[int], List[int], List[str]) - a, b, *c = range(5) # type: float, float, List[float] + # These examples are not PEP 484 compliant, and result in arity errors a, b = 1, 2 # type: Tuple[int, int] + w = foo() # type: float, str + + # Multiple assigns with mismatched LHS arities always result in arity + # errors, and we only codemod if each target is error-free + v = v0, v1 = (3, 5) # type: int, int """ after = before - self.assertCodemod38Plus(before, after) + self.assertCodemod39Plus(before, after) From 332710ddc0e684aed982c8ffb453b3d71d18b59d Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Mon, 17 Jan 2022 06:58:01 -0800 Subject: [PATCH 194/632] Add support for For and With (#607) --- .../codemod/commands/convert_type_comments.py | 119 +++++++++++++++++- .../tests/test_convert_type_comments.py | 71 +++++++++++ 2 files changed, 188 insertions(+), 2 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index a786f21a..ef91687b 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -7,7 +7,7 @@ import ast import builtins import functools import sys -from typing import List, Optional, Set, Tuple, Union +from typing import List, Optional, Sequence, Set, Tuple, Union from typing_extensions import TypeAlias @@ -30,7 +30,7 @@ def _ast_for_node(node: cst.CSTNode) -> ast.Module: def _statement_type_comment( - node: Union[cst.SimpleStatementLine, cst.For], + node: Union[cst.SimpleStatementLine, cst.For, cst.With], ) -> Optional[str]: return _ast_for_node(node).body[-1].type_comment @@ -156,6 +156,30 @@ class AnnotationSpreader: value=None, ) + @staticmethod + def type_declaration_statements( + bindings: UnpackedBindings, + annotations: UnpackedAnnotations, + leading_lines: Sequence[cst.EmptyLine], + ) -> List[cst.SimpleStatementLine]: + return [ + cst.SimpleStatementLine( + body=[ + AnnotationSpreader.type_declaration( + binding=binding, + raw_annotation=raw_annotation, + ) + ], + leading_lines=leading_lines if i == 0 else [], + ) + for i, (binding, raw_annotation) in enumerate( + AnnotationSpreader.annotated_bindings( + bindings=bindings, + annotations=annotations, + ) + ) + ] + def convert_Assign( node: cst.Assign, @@ -315,3 +339,94 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): ) else: raise RuntimeError(f"Unhandled value {converted}") + + def leave_For( + self, + original_node: cst.For, + updated_node: cst.For, + ) -> Union[cst.For, cst.FlattenSentinel]: + """ + Convert a For with a type hint on the bound variable(s) to + use type declarations. + """ + # Type comments are only possible when the body is an indented + # block, and we need this refinement to work with the header, + # so we check and only then extract the type comment. + body = updated_node.body + if not isinstance(body, cst.IndentedBlock): + return updated_node + type_comment = _statement_type_comment(original_node) + if type_comment is None: + return updated_node + # Zip up the type hint and the bindings. If we hit an arity + # error, abort. 
+ try: + type_declarations = AnnotationSpreader.type_declaration_statements( + bindings=AnnotationSpreader.unpack_target(updated_node.target), + annotations=AnnotationSpreader.unpack_type_comment(type_comment), + leading_lines=updated_node.leading_lines, + ) + except _ArityError: + return updated_node + # There is no arity error, so we can add the type delaration(s) + return cst.FlattenSentinel( + [ + *type_declarations, + updated_node.with_changes( + body=body.with_changes( + header=self._strip_TrailingWhitespace(body.header) + ), + leading_lines=[], + ), + ] + ) + + def leave_With( + self, + original_node: cst.With, + updated_node: cst.With, + ) -> Union[cst.With, cst.FlattenSentinel]: + """ + Convert a With with a type hint on the bound variable(s) to + use type declarations. + """ + # Type comments are only possible when the body is an indented + # block, and we need this refinement to work with the header, + # so we check and only then extract the type comment. + body = updated_node.body + if not isinstance(body, cst.IndentedBlock): + return updated_node + type_comment = _statement_type_comment(original_node) + if type_comment is None: + return updated_node + # PEP 484 does not attempt to specify type comment semantics for + # multiple with bindings (there's more than one sensible way to + # do it), so we make no attempt to handle this + targets = [ + item.asname.name for item in updated_node.items if item.asname is not None + ] + if len(targets) != 1: + return updated_node + target = targets[0] + # Zip up the type hint and the bindings. If we hit an arity + # error, abort. + try: + type_declarations = AnnotationSpreader.type_declaration_statements( + bindings=AnnotationSpreader.unpack_target(target), + annotations=AnnotationSpreader.unpack_type_comment(type_comment), + leading_lines=updated_node.leading_lines, + ) + except _ArityError: + return updated_node + # There is no arity error, so we can add the type delaration(s) + return cst.FlattenSentinel( + [ + *type_declarations, + updated_node.with_changes( + body=body.with_changes( + header=self._strip_TrailingWhitespace(body.header) + ), + leading_lines=[], + ), + ] + ) diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index e1e3da8b..8b0b44de 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -132,6 +132,65 @@ class TestConvertTypeComments(CodemodTest): """ self.assertCodemod39Plus(before, after) + def test_converting_for_statements(self) -> None: + before = """ + # simple binding + for x in foo(): # type: int + pass + + # nested binding + for (a, (b, c)) in bar(): # type: int, (str, float) + pass + """ + after = """ + # simple binding + x: int + for x in foo(): + pass + + # nested binding + a: int + b: str + c: float + for (a, (b, c)) in bar(): + pass + """ + self.assertCodemod39Plus(before, after) + + def test_converting_with_statements(self) -> None: + before = """ + # simple binding + with open('file') as f: # type: File + pass + + # simple binding, with extra items + with foo(), open('file') as f, bar(): # type: File + pass + + # nested binding + with bar() as (a, (b, c)): # type: int, (str, float) + pass + """ + after = """ + # simple binding + f: "File" + with open('file') as f: + pass + + # simple binding, with extra items + f: "File" + with foo(), open('file') as f, bar(): + pass + + # nested binding + a: int + b: str + c: float + with bar() as (a, 
(b, c)): + pass + """ + self.assertCodemod39Plus(before, after) + def test_no_change_when_type_comment_unused(self) -> None: before = """ # type-ignores are not type comments @@ -150,6 +209,18 @@ class TestConvertTypeComments(CodemodTest): # Multiple assigns with mismatched LHS arities always result in arity # errors, and we only codemod if each target is error-free v = v0, v1 = (3, 5) # type: int, int + + # Ignore for statements with arity mismatches + for x in []: # type: int, int + pass + + # Ignore with statements with arity mismatches + with open('file') as (f0, f1): # type: File + pass + + # Ignore with statements that have multiple item bindings + with open('file') as f0, open('file') as f1: # type: File + pass """ after = before self.assertCodemod39Plus(before, after) From 0c509b3f43cfb273a8915e197203fd2247069e32 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 18 Jan 2022 08:47:42 +0000 Subject: [PATCH 195/632] Stop indentation checking at EOF (#611) --- native/libcst/src/tokenizer/core/mod.rs | 10 +++++++--- native/libcst/src/tokenizer/tests.rs | 17 +++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 3ff97e7d..0dd60944 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -533,9 +533,13 @@ impl<'t> TokState<'t> { } } - // Lines with only whitespace and/or comments and/or a line continuation character shouldn't - // affect the indentation and are not passed to the parser as NEWLINE tokens. - self.blank_line = matches!(self.text_pos.peek(), Some('#') | Some('\n') | Some('\\')); + // Lines with only whitespace and/or comments and/or a line continuation + // character shouldn't affect the indentation and are not passed to the parser + // as NEWLINE tokens. + self.blank_line = matches!( + self.text_pos.peek(), + Some('#') | Some('\n') | Some('\\') | None + ); if self.blank_line || !self.paren_stack.is_empty() { return Ok(()); diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index db437857..2be070d1 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -727,3 +727,20 @@ fn test_add_dedents_for_dangling_indent_with_comment() { ]) ); } + +#[test] +fn test_inconsistent_indentation_at_eof() { + assert_eq!( + tokenize_all("if 1:\n pass\n ", &default_config()), + Ok(vec![ + (TokType::Name, "if"), + (TokType::Number, "1"), + (TokType::Op, ":"), + (TokType::Newline, "\n"), + (TokType::Indent, ""), + (TokType::Name, "pass"), + (TokType::Newline, "\n"), + (TokType::Dedent, ""), + ]) + ) +} From 9563b4a9d5320a9ccc3568cd4a27b93c0acbed4f Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Tue, 18 Jan 2022 07:20:21 -0800 Subject: [PATCH 196/632] Support FunctionDef transformations (#610) I've tested all of the edge cases I know of: type comments in various locations, non-type-comments, arity mismatches where we should skip, etc. Assuming that all type comments parse, this should work as far as I know. I'll make a separate PR to deal with SyntaxErrors when parsing types, because that is cross-cutting and not specific to FunctionDef. 
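
To make the intended behavior concrete, here is a small usage sketch (not part of the patch itself) showing the kind of transformation this adds. It is based on the new test cases below, assumes Python 3.9+ as required by this codemod, and uses the existing `Codemod.transform_module` entry point; the function name and source snippet are illustrative only.

```
import textwrap

import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.commands.convert_type_comments import ConvertTypeComments

# A function annotated with a PEP 484 function-level type comment on the
# first line of the body (the inline-comment and "(...) -> R" forms shown
# in the tests below are handled as well).
source = textwrap.dedent(
    '''
    def greet(name, times=1):
        # type: (str, int) -> None
        for _ in range(times):
            print("hello", name)
    '''
)

transformed = ConvertTypeComments(CodemodContext()).transform_module(
    cst.parse_module(source)
)
print(transformed.code)
# Per the new tests, the type comment is consumed and the parameters and
# return type become inline annotations, roughly:
#
# def greet(name: str, times: int = 1) -> None:
#     for _ in range(times):
#         print("hello", name)
```

Builtin names such as `str`, `int`, and `None` are applied unquoted; anything else is applied as a quoted annotation, as elsewhere in this codemod.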
--- .../codemod/commands/convert_type_comments.py | 304 +++++++++++++++++- .../tests/test_convert_type_comments.py | 115 ++++++- 2 files changed, 410 insertions(+), 9 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index ef91687b..3a27da71 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -5,9 +5,10 @@ import ast import builtins +import dataclasses import functools import sys -from typing import List, Optional, Sequence, Set, Tuple, Union +from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Union from typing_extensions import TypeAlias @@ -58,6 +59,25 @@ def _convert_annotation(raw: str) -> cst.Annotation: return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) +def _is_type_comment(comment: Optional[cst.Comment]) -> bool: + """ + Determine whether a comment is a type comment. + + Unfortunately, to strip type comments in a location-invariant way requires + finding them from pure libcst data. We only use this in function defs, where + the precise cst location of the type comment cna be hard to predict. + """ + if comment is None: + return False + value = comment.value[1:].strip() + if not value.startswith("type:"): + return False + suffix = value.removeprefix("type:").strip().split() + if len(suffix) > 0 and suffix[0] == "ignore": + return False + return True + + class _FailedToApplyAnnotation: pass @@ -228,17 +248,144 @@ def convert_Assign( ] +@dataclasses.dataclass(frozen=True) +class FunctionTypeInfo: + arguments: Dict[str, Optional[str]] + returns: Optional[str] + + def is_empty(self) -> bool: + return self.returns is None and self.arguments == {} + + @classmethod + def from_cst( + cls, + node_cst: cst.FunctionDef, + ) -> "FunctionTypeInfo": + """ + Using the `ast` type comment extraction logic, get type information + for a function definition. + + To understand edge case behavior see the `leave_FunctionDef` docstring. + """ + # pyre-ignore[33]: ast doesn't have complete stubs + node_ast: Any = ast.parse(_code_for_node(node_cst), type_comments=True).body[0] + # Note: this is guaranteed to have the correct arity. + args = [ + *node_ast.args.posonlyargs, + *node_ast.args.args, + *( + [] + if node_ast.args.vararg is None + else [ + node_ast.args.vararg, + ] + ), + *node_ast.args.kwonlyargs, + *( + [] + if node_ast.args.kwarg is None + else [ + node_ast.args.kwarg, + ] + ), + ] + function_type_comment = node_ast.type_comment + if function_type_comment is None: + return cls( + arguments={arg.arg: arg.type_comment for arg in args}, + returns=None, + ) + else: + # pyre-ignore[33]: ast doesn't have complete stubs + function_type_ast: Any = ast.parse( + node_ast.type_comment, + "", + mode="func_type", + ) + argtypes = function_type_ast.argtypes + returns = ast.unparse(function_type_ast.returns) + if ( + len(argtypes) == 1 + and isinstance(argtypes[0], ast.Constant) + and argtypes[0].value is Ellipsis + ): + # Only use the return type if the comment was like `(...) 
-> R` + return cls( + arguments={arg.arg: arg.type_comment for arg in args}, + returns=returns, + ) + elif len(argtypes) == len(args): + # Merge the type comments, preferring inline comments where available + return cls( + arguments={ + arg.arg: arg.type_comment or ast.unparse(from_func_type) + for arg, from_func_type in zip(args, argtypes) + }, + returns=returns, + ) + else: + # On arity mismatches, ignore the type information + return cls({}, None) + + class ConvertTypeComments(VisitorBasedCodemodCommand): """ - Codemod that converts type comments, as described in - https://www.python.org/dev/peps/pep-0484/#type-comments, - into PEP 526 annotated assignments. + Codemod that converts type comments into Python 3.6+ style + annotations. - This is a work in progress: we intend to also support - function type comments, with statements, and for statements - but those are not yet implemented. + We can handle type comments in the following statement types: + - Assign + - This is converted into a single AnnAssign when possible + - In more complicated cases it will produce multiple AnnAssign + nodes with no value (i.e. "type declaration" statements) + followed by an Assign + - For and With + - We prepend both of these with type declaration statements. + - FunctionDef + - We apply all the types we can find. If we find several: + - We prefer any existing annotations to type comments + - For parameters, we prefer inline type comments to + function-level type comments if we find both. + + We always apply the type comments as quoted annotations, unless + we know that it refers to a builtin. We do not guarantee that + the resulting string annotations would parse, but they should + never cause failures at module import time. + + We attempt to: + - Always strip type comments for statements where we successfully + applied types. + - Never strip type comments for statements where we failed to + apply types. + + There are many edge case possible where the arity of a type + hint (which is either a tuple or a func_type) might not match + the code. In these cases we generally give up: + - For Assign, For, and With, we require that every target of + bindings (e.g. a tuple of names being bound) must have exactly + the same arity as the comment. + - So, for example, we would skip an assignment statement such as + ``x = y, z = 1, 2 # type: int, int`` because the arity + of ``x`` does not match the arity of the hint. + - For FunctionDef, we do *not* check arity of inline parameter + type comments but we do skip the transform if the arity of + the function does not match the function-level comment. """ + # Finding the location of a type comment in a FunctionDef is difficult. + # + # As a result, if when visiting a FunctionDef header we are able to + # successfully extrct type information then we aggressively strip type + # comments until we reach the first statement in the body. + # + # Once we get there we have to stop, so that we don't unintentionally remove + # unprocessed type comments. + # + # This state handles tracking everything we need for this. + function_type_info_stack: List[FunctionTypeInfo] + function_body_stack: List[cst.BaseSuite] + aggressively_strip_type_comments: bool + def __init__(self, context: CodemodContext) -> None: if (sys.version_info.major, sys.version_info.minor) < (3, 9): # The ast module did not get `unparse` until Python 3.9, @@ -256,6 +403,9 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): + "it is only libcst that needs a new Python version." 
) super().__init__(context) + self.function_type_info_stack = [] + self.function_body_stack = [] + self.aggressively_strip_type_comments = False def _strip_TrailingWhitespace( self, @@ -430,3 +580,143 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): ), ] ) + + # Handle function definitions ------------------------- + + # **Implementation Notes** + # + # It is much harder to predict where exactly type comments will live + # in function definitions than in Assign / For / With. + # + # As a result, we use two different patterns: + # (A) we aggressively strip out type comments from whitespace between the + # start of a function define and the start of the body, whenever we were + # able to extract type information. This is done via mutable state and the + # usual visitor pattern. + # (B) we also manually reach down to the first statement inside of the + # funciton body and aggressively strip type comments from leading + # whitespaces + + def visit_FunctionDef( + self, + node: cst.FunctionDef, + ) -> None: + """ + Set up the data we need to handle function definitions: + - Parse the type comments. + - Store the resulting function type info on the stack, where it will + remain until we use it in `leave_FunctionDef` + - Set that we are aggressively stripping type comments, which will + remain true until we visit the body. + """ + function_type_info = FunctionTypeInfo.from_cst(node) + self.aggressively_strip_type_comments = not function_type_info.is_empty() + self.function_type_info_stack.append(function_type_info) + self.function_body_stack.append(node.body) + + def leave_TrailingWhitespace( + self, + original_node: cst.TrailingWhitespace, + updated_node: cst.TrailingWhitespace, + ) -> Union[cst.TrailingWhitespace]: + "Aggressively remove type comments when in header if we extracted types." + if self.aggressively_strip_type_comments and _is_type_comment( + updated_node.comment + ): + return cst.TrailingWhitespace() + else: + return updated_node + + def leave_EmptyLine( + self, + original_node: cst.EmptyLine, + updated_node: cst.EmptyLine, + ) -> Union[cst.EmptyLine, cst.RemovalSentinel]: + "Aggressively remove type comments when in header if we extracted types." + if self.aggressively_strip_type_comments and _is_type_comment( + updated_node.comment + ): + return cst.RemovalSentinel.REMOVE + else: + return updated_node + + def visit_FunctionDef_body( + self, + node: cst.FunctionDef, + ) -> None: + "Turn off aggressive type comment removal when we've leaved the header." + self.aggressively_strip_type_comments = False + + def leave_IndentedBlock( + self, + original_node: cst.IndentedBlock, + updated_node: cst.IndentedBlock, + ) -> cst.IndentedBlock: + "When appropriate, strip function type comment from the function body." + # abort unless this is the body of a function we are transforming + if len(self.function_body_stack) == 0: + return updated_node + if original_node is not self.function_body_stack[-1]: + return updated_node + if self.function_type_info_stack[-1].is_empty(): + return updated_node + # The comment will be in the body header if it was on the same line + # as the colon. + if _is_type_comment(updated_node.header.comment): + updated_node = updated_node.with_changes( + header=cst.TrailingWhitespace(), + ) + # The comment will be in a leading line of the first body statement + # if it was on the first line after the colon. 
+ first_statement = updated_node.body[0] + if not hasattr(first_statement, "leading_lines"): + return updated_node + return updated_node.with_changes( + body=[ + first_statement.with_changes( + leading_lines=[ + line + # pyre-ignore[16]: we refined via `hasattr` + for line in first_statement.leading_lines + if not _is_type_comment(line.comment) + ] + ), + *updated_node.body[1:], + ] + ) + + # Methods for adding type annotations ---- + # + # By the time we get here, all type comments should already be stripped. + + def leave_Param( + self, + original_node: cst.Param, + updated_node: cst.Param, + ) -> cst.Param: + # ignore type comments if there's already an annotation + if updated_node.annotation is not None: + return updated_node + # find out if there's a type comment and apply it if so + function_type_info = self.function_type_info_stack[-1] + raw_annotation = function_type_info.arguments.get(updated_node.name.value) + if raw_annotation is not None: + return updated_node.with_changes( + annotation=_convert_annotation(raw=raw_annotation) + ) + else: + return updated_node + + def leave_FunctionDef( + self, + original_node: cst.FunctionDef, + updated_node: cst.FunctionDef, + ) -> cst.FunctionDef: + self.function_body_stack.pop() + function_type_info = self.function_type_info_stack.pop() + if updated_node.returns is None and function_type_info.returns is not None: + return updated_node.with_changes( + returns=_convert_annotation(raw=function_type_info.returns) + ) + else: + return updated_node diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 8b0b44de..a50399d2 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -9,7 +9,7 @@ from libcst.codemod import CodemodTest from libcst.codemod.commands.convert_type_comments import ConvertTypeComments -class TestConvertTypeComments(CodemodTest): +class TestConvertTypeCommentsBase(CodemodTest): maxDiff = 1500 TRANSFORM = ConvertTypeComments @@ -25,8 +25,8 @@ class TestConvertTypeComments(CodemodTest): else: super().assertCodemod(before, after) - # Tests converting assignment type comments ----------------- +class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): def test_convert_assignments(self) -> None: before = """ y = 5 # type: int @@ -224,3 +224,114 @@ class TestConvertTypeComments(CodemodTest): """ after = before self.assertCodemod39Plus(before, after) + + +class TestConvertTypeComments_FunctionDef(TestConvertTypeCommentsBase): + """ + Some notes on our testing strategy: In order to avoid a combinatorial + explosion in test cases, we leverage some knowledge about the + implementation. + + Here are the key ideas that allow us to write fewer cases: + - The logic for generating annotations is the same for all annotations, + and is well-covered by TestConvertTypeComments_AssignForWith, so we + can stick to just simple builtin types. + - The application of types is independent of where they came from. + - Type comment removal is indepenent of type application, other + than in the case where we give up entirely. + - The rules for which type gets used (existing annotation, inline comment, + or func type comment) is independent of the location of a parameter. + """ + + def test_simple_function_type_comments(self) -> None: + before = """ + def f0(x): # type: (...) 
-> None + pass + + def f1(x): # type: (int) -> None + pass + + def f2(x, /, y = 'y', *, z = 1.5): + # type: (int, str, float) -> None + pass + + def f3(x, *args, y, **kwargs): + # type: (str, int, str, float) -> None + pass + + def f4(x, *args, **kwargs): + # type: (str, *int, **float) -> None + pass + """ + after = """ + def f0(x) -> None: + pass + + def f1(x: int) -> None: + pass + + def f2(x: int, /, y: str = 'y', *, z: float = 1.5) -> None: + pass + + def f3(x: str, *args: int, y: str, **kwargs: float) -> None: + pass + + def f4(x: str, *args: int, **kwargs: float) -> None: + pass + """ + self.assertCodemod39Plus(before, after) + + def test_prioritization_order_for_type_application(self) -> None: + before = """ + def f( + x: int, # type: str + y, # type: str + z + ): # type: (float, float, float) -> None + pass + """ + after = """ + def f( + x: int, + y: str, + z: float + ) -> None: + pass + """ + self.assertCodemod39Plus(before, after) + + def test_inlined_function_type_comments(self) -> None: + before = """ + def f( + x, # not-a-type-comment + # also-not-a-type-comment + y = 42, # type: int + *args, + # type: technically-another-line-is-legal :o + z, + **kwargs, # type: str + ): # not-a-type-comment + # also-not-a-type-comment + pass + """ + after = """ + def f( + x, # not-a-type-comment + # also-not-a-type-comment + y: int = 42, + *args: "technically-another-line-is-legal :o", + z, + **kwargs: str, + ): # not-a-type-comment + # also-not-a-type-comment + pass + """ + self.assertCodemod39Plus(before, after) + + def test_no_change_if_arity_error_in_func_type_comment(self) -> None: + before = """ + def f(x, y): # type: (int) -> float + pass + """ + after = before + self.assertCodemod39Plus(before, after) From f2c7cfe00a1f646f514e69c5cdbf70737f9163f5 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 19 Jan 2022 16:27:39 +0000 Subject: [PATCH 197/632] [ci] Cache rust build artifacts (#606) * use Swatinem/rust-cache to cache rust build artifacts * use rust cache in cibuildwheel on linux --- .github/workflows/build.yml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7e93f9e6..778e1416 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -34,6 +34,11 @@ jobs: uses: actions-rs/toolchain@v1 with: toolchain: stable + - if: ${{ matrix.parser == 'native' }} + name: Rust Cache + uses: Swatinem/rust-cache@v1.3.0 + with: + working-directory: native - run: >- echo LIBCST_PARSER_TYPE=${{ matrix.parser }} >> $GITHUB_ENV - name: Run Tests @@ -164,7 +169,9 @@ jobs: os: windows-2019 env: SCCACHE_VERSION: 0.2.13 - CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y" + CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" + CIBW_BEFORE_BUILD_LINUX: "rm -rf native/target; ln -s /host/${{github.workspace}}/native/target native/target; [ -d /host/${{github.workspace}}/native/target ] || mkdir /host/${{github.workspace}}/native/target" + CIBW_ENVIRONMENT_LINUX: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME CARGO_HOME=/host/home/runner/.cargo' CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' @@ -185,6 +192,10 
@@ jobs: if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Rust Cache + uses: Swatinem/rust-cache@v1.3.0 + with: + working-directory: native - name: Disable scmtools local scheme if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- @@ -252,6 +263,10 @@ jobs: with: toolchain: stable components: rustfmt, clippy + - name: Rust Cache + uses: Swatinem/rust-cache@v1.3.0 + with: + working-directory: native - uses: actions/setup-python@v2 with: python-version: "3.10" From ceac7fca4d9aed282bef2ea1756f66ab973107f4 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 19 Jan 2022 19:31:48 +0000 Subject: [PATCH 198/632] specify minimum rust toolchain version (#614) --- native/libcst/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index b0a9b9df..b0e111ee 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -8,6 +8,7 @@ name = "libcst" version = "0.1.0" authors = ["LibCST Developers"] edition = "2018" +rust-version = "1.53" [lib] name = "libcst_native" From 5a1220097d06b8b0bd7378e0326002482a16d1f5 Mon Sep 17 00:00:00 2001 From: Arie Bovenberg Date: Thu, 20 Jan 2022 19:19:33 +0100 Subject: [PATCH 199/632] [ci] add slotscheck (#615) Co-authored-by: Zsolt Dollenstein --- .github/workflows/build.yml | 1 + README.rst | 7 +++++++ pyproject.toml | 3 +++ requirements-dev.txt | 1 + 4 files changed, 12 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 778e1416..20aee9da 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -64,6 +64,7 @@ jobs: - run: flake8 - run: ufmt check . - run: python3 -m fixit.cli.run_rules + - run: python -m slotscheck libcst # Run pyre typechecker typecheck: diff --git a/README.rst b/README.rst index be1d5d94..94616832 100644 --- a/README.rst +++ b/README.rst @@ -164,6 +164,13 @@ changes to be conformant, run the following in the root: ufmt format && python -m fixit.cli.apply_fix +We use `slotscheck `_ to check the correctness +of class ``__slots__``. To check that slots are defined properly, run: + +.. code-block:: shell + + python -m slotscheck libcst + To run all tests, you'll need to do the following in the root: .. code-block:: shell diff --git a/pyproject.toml b/pyproject.toml index 1d33e75e..84cfc628 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,5 +5,8 @@ exclude = "native/.*" [tool.ufmt] excludes = ["native/", "stubs/"] +[tool.slotscheck] +exclude-modules = '^libcst\.(testing|tests)' + [build-system] requires = ["setuptools", "wheel", "setuptools-rust"] \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 388c7556..dbebc185 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -15,3 +15,4 @@ sphinx-rtd-theme>=0.4.3 ufmt==1.3 usort==1.0.0rc1 setuptools-rust>=0.12.1 +slotscheck>=0.7.1 From e459e6062886fcf31b980deacb37c3de49babcba Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 21 Jan 2022 08:13:35 -0800 Subject: [PATCH 200/632] Handle ast.parse failures when converting function type comments (#616) * Handle syntax errors in the ast parse function. If we encounter a syntax error in either the type comment extraction or the type comment parsing stages, ignore type information on that cst node. 
* Quote the FunctionType type, which does not exist in Python x3.6 --- .../codemod/commands/convert_type_comments.py | 117 +++++++++++------- .../tests/test_convert_type_comments.py | 22 +++- 2 files changed, 93 insertions(+), 46 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 3a27da71..0615fd01 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -8,7 +8,7 @@ import builtins import dataclasses import functools import sys -from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Union +from typing import cast, Dict, List, Optional, Sequence, Set, Tuple, Union from typing_extensions import TypeAlias @@ -25,15 +25,52 @@ def _code_for_node(node: cst.CSTNode) -> str: return _empty_module().code_for_node(node) -def _ast_for_node(node: cst.CSTNode) -> ast.Module: +def _ast_for_statement(node: cst.CSTNode) -> ast.stmt: + """ + Get the type-comment-enriched python AST for a node. + + If there are illegal type comments, this can return a SyntaxError. + In that case, return the same node with no type comments (which will + cause this codemod to ignore it). + """ code = _code_for_node(node) - return ast.parse(code, type_comments=True) + try: + return ast.parse(code, type_comments=True).body[-1] + except SyntaxError: + return ast.parse(code, type_comments=False).body[-1] -def _statement_type_comment( - node: Union[cst.SimpleStatementLine, cst.For, cst.With], -) -> Optional[str]: - return _ast_for_node(node).body[-1].type_comment +def _parse_type_comment( + type_comment: Optional[str], +) -> Optional[ast.expr]: + """ + Attempt to parse a type comment. If it is None or if it fails to parse, + return None. + """ + if type_comment is None: + return None + try: + # pyre-ignore[16]: the ast module stubs do not have full details + return ast.parse(type_comment, "", "eval").body + except SyntaxError: + return None + + +def _annotation_for_statement( + node: cst.CSTNode, +) -> Optional[ast.expr]: + return _parse_type_comment(_ast_for_statement(node).type_comment) + + +def _parse_func_type_comment( + func_type_comment: Optional[str], +) -> Optional["ast.FunctionType"]: + if func_type_comment is None: + return None + return cast( + ast.FunctionType, + ast.parse(func_type_comment, "", "func_type"), + ) @functools.lru_cache() @@ -98,28 +135,16 @@ class AnnotationSpreader: """ @staticmethod - def _unparse_annotation( + def unpack_annotation( expression: ast.expr, ) -> UnpackedAnnotations: if isinstance(expression, ast.Tuple): return [ - AnnotationSpreader._unparse_annotation(elt) for elt in expression.elts + AnnotationSpreader.unpack_annotation(elt) for elt in expression.elts ] else: return ast.unparse(expression) - @staticmethod - def unpack_type_comment( - type_comment: str, - ) -> UnpackedAnnotations: - """ - Unpack an ast module expression and unparse it into a recursive - list of strings matching the tuple structure of the type comment. 
- """ - # pyre-ignore[16]: the ast module stubs do not have full details - annotation_ast = ast.parse(type_comment, "", "eval").body - return AnnotationSpreader._unparse_annotation(annotation_ast) - @staticmethod def unpack_target( target: cst.BaseExpression, @@ -203,7 +228,7 @@ class AnnotationSpreader: def convert_Assign( node: cst.Assign, - type_comment: str, + annotation: ast.expr, ) -> Union[ _FailedToApplyAnnotation, cst.AnnAssign, @@ -214,7 +239,7 @@ def convert_Assign( # logic beyond the PEP to recover some cases as typing.Tuple, but this # should be rare) so we give up. try: - annotations = AnnotationSpreader.unpack_type_comment(type_comment) + annotations = AnnotationSpreader.unpack_annotation(annotation) annotated_targets = [ AnnotationSpreader.annotated_bindings( bindings=AnnotationSpreader.unpack_target(target.target), @@ -267,8 +292,7 @@ class FunctionTypeInfo: To understand edge case behavior see the `leave_FunctionDef` docstring. """ - # pyre-ignore[33]: ast doesn't have complete stubs - node_ast: Any = ast.parse(_code_for_node(node_cst), type_comments=True).body[0] + node_ast = cast(ast.FunctionDef, _ast_for_statement(node_cst)) # Note: this is guaranteed to have the correct arity. args = [ *node_ast.args.posonlyargs, @@ -289,24 +313,27 @@ class FunctionTypeInfo: ] ), ] - function_type_comment = node_ast.type_comment - if function_type_comment is None: + try: + func_type_annotation = _parse_func_type_comment(node_ast.type_comment) + except SyntaxError: + # On unparsable function type annotations, ignore type information + return cls({}, None) + if func_type_annotation is None: return cls( - arguments={arg.arg: arg.type_comment for arg in args}, + arguments={ + arg.arg: arg.type_comment + for arg in args + if arg.type_comment is not None + }, returns=None, ) else: - # pyre-ignore[33]: ast doesn't have complete stubs - function_type_ast: Any = ast.parse( - node_ast.type_comment, - "", - mode="func_type", - ) - argtypes = function_type_ast.argtypes - returns = ast.unparse(function_type_ast.returns) + argtypes = func_type_annotation.argtypes + returns = ast.unparse(func_type_annotation.returns) if ( len(argtypes) == 1 and isinstance(argtypes[0], ast.Constant) + # pyre-ignore [16] Pyre cannot refine constant indexes (yet!) and argtypes[0].value is Ellipsis ): # Only use the return type if the comment was like `(...) -> R` @@ -431,14 +458,14 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): assign = updated_node.body[-1] if not isinstance(assign, cst.Assign): # only Assign matters return updated_node - type_comment = _statement_type_comment(original_node) - if type_comment is None: + annotation = _annotation_for_statement(original_node) + if annotation is None: return updated_node # At this point have a single-line Assign with a type comment. # Convert it to an AnnAssign and strip the comment. converted = convert_Assign( node=assign, - type_comment=type_comment, + annotation=annotation, ) if isinstance(converted, _FailedToApplyAnnotation): # We were unable to consume the type comment, so return the @@ -505,15 +532,15 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): body = updated_node.body if not isinstance(body, cst.IndentedBlock): return updated_node - type_comment = _statement_type_comment(original_node) - if type_comment is None: + annotation = _annotation_for_statement(original_node) + if annotation is None: return updated_node # Zip up the type hint and the bindings. If we hit an arity # error, abort. 
try: type_declarations = AnnotationSpreader.type_declaration_statements( bindings=AnnotationSpreader.unpack_target(updated_node.target), - annotations=AnnotationSpreader.unpack_type_comment(type_comment), + annotations=AnnotationSpreader.unpack_annotation(annotation), leading_lines=updated_node.leading_lines, ) except _ArityError: @@ -546,8 +573,8 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): body = updated_node.body if not isinstance(body, cst.IndentedBlock): return updated_node - type_comment = _statement_type_comment(original_node) - if type_comment is None: + annotation = _annotation_for_statement(original_node) + if annotation is None: return updated_node # PEP 484 does not attempt to specify type comment semantics for # multiple with bindings (there's more than one sensible way to @@ -563,7 +590,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): try: type_declarations = AnnotationSpreader.type_declaration_statements( bindings=AnnotationSpreader.unpack_target(target), - annotations=AnnotationSpreader.unpack_type_comment(type_comment), + annotations=AnnotationSpreader.unpack_annotation(annotation), leading_lines=updated_node.leading_lines, ) except _ArityError: diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index a50399d2..1bf997cb 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -199,6 +199,9 @@ class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): # a commented type comment (per PEP 484) is not a type comment z = 15 # # type: int + # ignore unparseable type comments + var = "var" # type: this is not a python type! + # a type comment in an illegal location won't be used print("hello") # type: None @@ -221,6 +224,12 @@ class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): # Ignore with statements that have multiple item bindings with open('file') as f0, open('file') as f1: # type: File pass + + # In cases where the entire statement cannot successfully be parsed + # with `type_comments=True` because of an invalid type comment, we + # skip it. Here, annotating the inner `pass` is illegal. + for x in []: # type: int + pass # type: None """ after = before self.assertCodemod39Plus(before, after) @@ -328,10 +337,21 @@ class TestConvertTypeComments_FunctionDef(TestConvertTypeCommentsBase): """ self.assertCodemod39Plus(before, after) - def test_no_change_if_arity_error_in_func_type_comment(self) -> None: + def test_no_change_if_function_type_comments_unused(self) -> None: before = """ + # arity error in arguments def f(x, y): # type: (int) -> float pass + + # unparseable function type + def f(x, y): # type: this is not a type! + pass + + # In cases where the entire statement cannot successfully be parsed + # with `type_comments=True` because of an invalid type comment, we + # skip it. Here, annotating the inner `pass` is illegal. 
+ def f(x, y): # type: (int, int) -> None + pass # type: None """ after = before self.assertCodemod39Plus(before, after) From d35b6a54e5a9155c14f1d7c4ffea810186135d2f Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Sat, 22 Jan 2022 03:25:16 -0800 Subject: [PATCH 201/632] Do not traverse lambdas - they have no Params (#617) --- libcst/codemod/commands/convert_type_comments.py | 11 +++++++++++ .../commands/tests/test_convert_type_comments.py | 14 ++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 0615fd01..e69b6857 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -747,3 +747,14 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): ) else: return updated_node + + def visit_Lambda( + self, + node: cst.Lambda, + ) -> bool: + """ + Disable traversing under lambdas. They don't have any statements + nested inside them so there's no need, and they do have Params which + we don't want to transform. + """ + return False diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 1bf997cb..f2588699 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -355,3 +355,17 @@ class TestConvertTypeComments_FunctionDef(TestConvertTypeCommentsBase): """ after = before self.assertCodemod39Plus(before, after) + + def test_do_not_traverse_lambda_Param(self) -> None: + """ + The Param node can happen not just in FunctionDef but also in + Lambda. Make sure this doesn't cause problems. + """ + before = """ + @dataclass + class WrapsAFunction: + func: Callable + msg_gen: Callable = lambda self: f"calling {self.func.__name__}..." 
+ """ + after = before + self.assertCodemod39Plus(before, after) From 2b2b25bb0871074971888e3be0e222aa1532f7b1 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Sun, 23 Jan 2022 16:45:27 +0300 Subject: [PATCH 202/632] Don't redundantly nest StarredElement inside another Element (#624) --- libcst/_nodes/tests/test_tuple.py | 41 +++++++++++++++++++++++++++++ native/libcst/src/parser/grammar.rs | 9 ++++--- 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/libcst/_nodes/tests/test_tuple.py b/libcst/_nodes/tests/test_tuple.py index db9f2cce..0055055c 100644 --- a/libcst/_nodes/tests/test_tuple.py +++ b/libcst/_nodes/tests/test_tuple.py @@ -91,6 +91,47 @@ class TupleTest(CSTNodeTest): "parser": parse_expression, "expected_position": CodeRange((1, 1), (1, 11)), }, + # top-level two-element tuple, with one being starred + { + "node": cst.SimpleStatementLine( + body=[ + cst.Expr( + value=cst.Tuple( + [ + cst.Element(cst.Name("one"), comma=cst.Comma()), + cst.StarredElement(cst.Name("two")), + ], + lpar=[], + rpar=[], + ) + ) + ] + ), + "code": "one,*two\n", + "parser": parse_statement, + }, + # top-level three-element tuple, start/end is starred + { + "node": cst.SimpleStatementLine( + body=[ + cst.Expr( + value=cst.Tuple( + [ + cst.StarredElement( + cst.Name("one"), comma=cst.Comma() + ), + cst.Element(cst.Name("two"), comma=cst.Comma()), + cst.StarredElement(cst.Name("three")), + ], + lpar=[], + rpar=[], + ) + ) + ] + ), + "code": "*one,two,*three\n", + "parser": parse_statement, + }, # missing spaces around tuple, okay with parenthesis { "node": cst.For( diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 70d2f968..c2bfd257 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -2174,9 +2174,12 @@ fn make_assignment<'a>( } fn expr_to_element(expr: Expression) -> Element { - Element::Simple { - value: expr, - comma: Default::default(), + match expr { + Expression::StarredElement(inner_expr) => Element::Starred(inner_expr), + _ => Element::Simple { + value: expr, + comma: Default::default(), + }, } } From 2345848d4a0f92af27f292befaccb11f87b7caa1 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Sun, 23 Jan 2022 20:10:47 +0300 Subject: [PATCH 203/632] [native] Allow unparenthesized tuples inside f-strings (#621) --- libcst/_nodes/tests/test_atom.py | 63 +++++++++++++++++++++++++++++ native/libcst/src/parser/grammar.rs | 2 +- 2 files changed, 64 insertions(+), 1 deletion(-) diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index c2e31608..82f7ab99 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -740,6 +740,69 @@ class AtomTest(CSTNodeTest): "parser": parse_expression, "expected_position": None, }, + # Unpacked tuple + { + "node": cst.FormattedString( + parts=[ + cst.FormattedStringExpression( + expression=cst.Tuple( + elements=[ + cst.Element( + value=cst.Name( + value="a", + ), + comma=cst.Comma( + whitespace_before=cst.SimpleWhitespace( + value="", + ), + whitespace_after=cst.SimpleWhitespace( + value=" ", + ), + ), + ), + cst.Element( + value=cst.Name( + value="b", + ), + ), + ], + lpar=[], + rpar=[], + ), + ), + ], + start="f'", + end="'", + ), + "code": "f'{a, b}'", + "parser": parse_expression, + "expected_position": None, + }, + # Conditional expression + { + "node": cst.FormattedString( + parts=[ + cst.FormattedStringExpression( + expression=cst.IfExp( + test=cst.Name( + value="b", + ), + body=cst.Name( + value="a", + ), + 
orelse=cst.Name( + value="c", + ), + ), + ), + ], + start="f'", + end="'", + ), + "code": "f'{a if b else c}'", + "parser": parse_expression, + "expected_position": None, + }, # Concatenated strings { "node": cst.ConcatenatedString( diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index c2bfd257..cc0511dd 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -1396,7 +1396,7 @@ parser! { rule _f_expr() -> Expression<'a> = (g:_bare_genexp() {Expression::GeneratorExp(g)}) - / _conditional_expression() + / star_expressions() / yield_expr() rule _f_conversion() -> &'a str From 68780fd6b2c34dd3d8b8704c39deecf9a20abab3 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Mon, 24 Jan 2022 20:20:44 +0300 Subject: [PATCH 204/632] Don't require whitespace right after match (#628) --- libcst/_nodes/statement.py | 5 ----- libcst/_nodes/tests/test_match.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index ded7c7c6..0e1cefe6 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -2687,11 +2687,6 @@ class Match(BaseCompoundStatement): if len(self.cases) == 0: raise CSTValidationError("A match statement must have at least one case.") - if self.whitespace_after_match.empty: - raise CSTValidationError( - "Must have at least one space after a 'match' keyword" - ) - indent = self.indent if indent is not None: if len(indent) == 0: diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py index edf51d84..5ceea72c 100644 --- a/libcst/_nodes/tests/test_match.py +++ b/libcst/_nodes/tests/test_match.py @@ -425,6 +425,34 @@ class MatchTest(CSTNodeTest): + " case None | False | True: pass\n", "parser": None, }, + # Match without whitespace between keyword and the expr + { + "node": cst.Match( + subject=cst.Name( + "x", lpar=[cst.LeftParen()], rpar=[cst.RightParen()] + ), + cases=[ + cst.MatchCase( + pattern=cst.MatchSingleton( + cst.Name( + "None", + lpar=[cst.LeftParen()], + rpar=[cst.RightParen()], + ) + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_after_case=cst.SimpleWhitespace( + value="", + ), + ), + ], + whitespace_after_match=cst.SimpleWhitespace( + value="", + ), + ), + "code": "match(x):\n case(None): pass\n", + "parser": parser, + }, ) ) def test_valid(self, **kwargs: Any) -> None: From 5b6b19af840dd9c551fb18b6835cc5191a5dd349 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Tue, 25 Jan 2022 14:02:08 +0300 Subject: [PATCH 205/632] Proxy both parentheses in some pattern matching nodes (#626) --- libcst/_nodes/statement.py | 17 ++++++++++++++++ libcst/_nodes/tests/test_match.py | 33 +++++++++++++++++++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 0e1cefe6..e4b8d284 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -2843,6 +2843,14 @@ class MatchValue(MatchPattern): def lpar(self, value: Sequence[LeftParen]) -> None: self.value.lpar = value + @property + def rpar(self) -> Sequence[RightParen]: + return self.value.rpar + + @rpar.setter + def rpar(self, value: Sequence[RightParen]) -> None: + self.value.rpar = value + @add_slots @dataclass(frozen=True) @@ -2876,6 +2884,15 @@ class MatchSingleton(MatchPattern): # pyre-fixme[41]: Cannot reassign final attribute `lpar`. 
self.value.lpar = value + @property + def rpar(self) -> Sequence[RightParen]: + return self.value.rpar + + @rpar.setter + def rpar(self, value: Sequence[RightParen]) -> None: + # pyre-fixme[41]: Cannot reassign final attribute `rpar`. + self.value.rpar = value + @add_slots @dataclass(frozen=True) diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py index 5ceea72c..a203ffef 100644 --- a/libcst/_nodes/tests/test_match.py +++ b/libcst/_nodes/tests/test_match.py @@ -39,6 +39,39 @@ class MatchTest(CSTNodeTest): + ' case "foo": pass\n', "parser": parser, }, + # Parenthesized value + { + "node": cst.Match( + subject=cst.Name( + value="x", + ), + cases=[ + cst.MatchCase( + pattern=cst.MatchAs( + pattern=cst.MatchValue( + value=cst.Integer( + value="1", + lpar=[ + cst.LeftParen(), + ], + rpar=[ + cst.RightParen(), + ], + ), + ), + name=cst.Name( + value="z", + ), + whitespace_before_as=cst.SimpleWhitespace(" "), + whitespace_after_as=cst.SimpleWhitespace(" "), + ), + body=cst.SimpleStatementSuite([cst.Pass()]), + ), + ], + ), + "code": "match x:\n case (1) as z: pass\n", + "parser": parser, + }, # List patterns { "node": cst.Match( From 595d8c39481d7106bb29937e6eb10ec3c65bfff7 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Tue, 25 Jan 2022 09:37:36 -0800 Subject: [PATCH 206/632] Add support for methods with func type comment excluding self/cls (#622) * Add support for methods with func type comment excluding self/cls PEP 484 doesn't really specify carefully how function type comments should work on methods, but since usually the type of `self` / `cls` is automatic, most use cases choose to only annotate the other arguments. As a result, this commit modifies our codemod so that non-static methods can specify either all the arguments, or all but one of them. We'll correctly zip together the inline and func-type-comment types either way, typically getting no type for `cls` or `self`. We accomplish this by using matchers to trigger the visit method for FunctionDef rather than using visit_FunctionDef, which gives us enough context to determine when a function def is a regular function versus a method (plus also matching the decorators against `@staticmethod`, so that we trigger the normal function logic in that case). Co-authored-by: Zsolt Dollenstein --- .../codemod/commands/convert_type_comments.py | 48 ++++++++++++- .../tests/test_convert_type_comments.py | 67 +++++++++++++++++++ 2 files changed, 113 insertions(+), 2 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index e69b6857..baccae52 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -13,6 +13,7 @@ from typing import cast, Dict, List, Optional, Sequence, Set, Tuple, Union from typing_extensions import TypeAlias import libcst as cst +import libcst.matchers as m from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand @@ -285,6 +286,7 @@ class FunctionTypeInfo: def from_cst( cls, node_cst: cst.FunctionDef, + is_method: bool, ) -> "FunctionTypeInfo": """ Using the `ast` type comment extraction logic, get type information @@ -350,6 +352,18 @@ class FunctionTypeInfo: }, returns=returns, ) + elif is_method and len(argtypes) == len(args) - 1: + # Merge as above, but skip merging the initial `self` or `cls` arg. 
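+ # For example, `def f(self, x):  # type: (int) -> bool` supplies one
+ # type for two args: `self` keeps no annotation and `x` becomes `int`.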
+ return cls( + arguments={ + args[0].arg: args[0].type_comment, + **{ + arg.arg: arg.type_comment or ast.unparse(from_func_type) + for arg, from_func_type in zip(args[1:], argtypes) + }, + }, + returns=returns, + ) else: # On arity mismatches, ignore the type information return cls({}, None) @@ -623,10 +637,16 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): # (B) we also manually reach down to the first statement inside of the # funciton body and aggressively strip type comments from leading # whitespaces + # + # PEP 484 underspecifies how to apply type comments to (non-static) + # methods - it would be possible to provide a type for `self`, or to omit + # it. So we accept either approach when interpreting type comments on + # non-static methods: the first argument an have a type provided or not. - def visit_FunctionDef( + def _visit_FunctionDef( self, node: cst.FunctionDef, + is_method: bool, ) -> None: """ Set up the data we need to handle function definitions: @@ -636,11 +656,35 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): - Set that we are aggressively stripping type comments, which will remain true until we visit the body. """ - function_type_info = FunctionTypeInfo.from_cst(node) + function_type_info = FunctionTypeInfo.from_cst(node, is_method=is_method) self.aggressively_strip_type_comments = not function_type_info.is_empty() self.function_type_info_stack.append(function_type_info) self.function_body_stack.append(node.body) + @m.call_if_not_inside(m.ClassDef()) + @m.visit(m.FunctionDef()) + def visit_method( + self, + node: cst.FunctionDef, + ) -> None: + return self._visit_FunctionDef( + node=node, + is_method=False, + ) + + @m.call_if_inside(m.ClassDef()) + @m.visit(m.FunctionDef()) + def visit_function( + self, + node: cst.FunctionDef, + ) -> None: + return self._visit_FunctionDef( + node=node, + is_method=not any( + m.matches(d.decorator, m.Name("staticmethod")) for d in node.decorators + ), + ) + def leave_TrailingWhitespace( self, original_node: cst.TrailingWhitespace, diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index f2588699..6bd5a8a3 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -337,6 +337,73 @@ class TestConvertTypeComments_FunctionDef(TestConvertTypeCommentsBase): """ self.assertCodemod39Plus(before, after) + def test_method_transforms(self) -> None: + before = """ + class A: + + def __init__(self, thing): # type: (str) -> None + self.thing = thing + + @classmethod + def make(cls): # type: () -> A + return cls("thing") + + @staticmethod + def f(x, y): # type: (object, object) -> None + pass + + def method0( + self, + other_thing, + ): # type: (str) -> bool + return self.thing == other_thing + + def method1( + self, # type: A + other_thing, # type: str + ): # type: (int) -> bool + return self.thing == other_thing + + def method2( + self, + other_thing, + ): # type: (A, str) -> bool + return self.thing == other_thing + """ + after = """ + class A: + + def __init__(self, thing: str) -> None: + self.thing = thing + + @classmethod + def make(cls) -> "A": + return cls("thing") + + @staticmethod + def f(x: object, y: object) -> None: + pass + + def method0( + self, + other_thing: str, + ) -> bool: + return self.thing == other_thing + + def method1( + self: "A", + other_thing: str, + ) -> bool: + return self.thing == other_thing + + def method2( + self: "A", + 
other_thing: str, + ) -> bool: + return self.thing == other_thing + """ + self.assertCodemod39Plus(before, after) + def test_no_change_if_function_type_comments_unused(self) -> None: before = """ # arity error in arguments From 3bd8c68207c2a7ed3f31702cf45d57d35e22844a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 28 Jan 2022 10:31:53 +0000 Subject: [PATCH 207/632] [ci] build wheels on main branch for linux/arm64 (#630) --- .github/build-matrix.json | 31 ++++++++++++++++++++++++++ .github/workflows/build.yml | 44 +++++++++++++++++++------------------ 2 files changed, 54 insertions(+), 21 deletions(-) create mode 100644 .github/build-matrix.json diff --git a/.github/build-matrix.json b/.github/build-matrix.json new file mode 100644 index 00000000..5a611b04 --- /dev/null +++ b/.github/build-matrix.json @@ -0,0 +1,31 @@ +[ + { + "vers": "x86_64", + "os": "ubuntu-20.04" + }, + { + "vers": "i686", + "os": "ubuntu-20.04" + }, + { + "vers": "arm64", + "os": "macos-10.15" + }, + { + "vers": "auto64", + "os": "macos-10.15" + }, + { + "vers": "auto64", + "os": "windows-2019" + }, + { + "vers": "aarch64", + "os": [ + "self-hosted", + "linux", + "ARM64" + ], + "on_ref_regex": "^refs/(heads/main|tags/.*)$" + } +] \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 20aee9da..007d64ec 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -147,27 +147,30 @@ jobs: name: sphinx-docs path: docs/build -# Build python package +# Build python wheels + build_matrix: + name: Prepare job matrix for build job + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v1 + - id: set-matrix + # match github.ref to the on_ref_regex field in the json + # to skip running linux/aarch64 builds on PRs + run: | + matrix=$(jq --arg ref "${{ github.ref }}" \ + 'map(select(.on_ref_regex as $pat | $pat == null or ($ref | test($pat))) | del(.on_ref_regex))' \ + .github/build-matrix.json) + echo ::set-output name=matrix::{\"include\":$(echo $matrix)}\" + build: - name: Build wheels on ${{ matrix.os }}/${{ matrix.vers }} + name: Build wheels on ${{ join(matrix.os, '/') }}/${{ matrix.vers }} + needs: build_matrix runs-on: ${{ matrix.os }} strategy: fail-fast: false - matrix: - include: - - vers: i686 - os: ubuntu-20.04 - # aarch64 seems to be stuck - # - vers: aarch64 - # os: ubuntu-20.04 - - vers: auto64 - os: ubuntu-20.04 - - vers: arm64 - os: macos-10.15 - - vers: auto64 - os: macos-10.15 - - vers: auto64 - os: windows-2019 + matrix: ${{fromJson(needs.build_matrix.outputs.matrix)}} env: SCCACHE_VERSION: 0.2.13 CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" @@ -182,18 +185,17 @@ jobs: steps: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 + if: ${{ !contains(matrix.os, 'self-hosted') }} with: python-version: "3.10" - uses: actions/cache@v2 id: cache + if: ${{ !contains(matrix.os, 'self-hosted') }} with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Rust Cache + if: ${{ !contains(matrix.os, 'self-hosted') }} uses: Swatinem/rust-cache@v1.3.0 with: working-directory: native From 
8ed3a9cd5ccac6354ff52aa04c3c87c3d197ea4b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 28 Jan 2022 10:33:33 +0000 Subject: [PATCH 208/632] [native] Box most enums (#632) * Box most enums * add big nested expression as fixture --- native/libcst/src/nodes/expression.rs | 97 +++++---- native/libcst/src/nodes/statement.rs | 23 +-- native/libcst/src/parser/grammar.rs | 190 +++++++++--------- native/libcst/src/parser/numbers.rs | 16 +- .../tests/fixtures/big_binary_operator.py | 32 +++ 5 files changed, 192 insertions(+), 166 deletions(-) create mode 100644 native/libcst/tests/fixtures/big_binary_operator.py diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 53801c46..29f77deb 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -58,11 +58,10 @@ impl<'a> Inflate<'a> for Parameters<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum StarArg<'a> { - Star(ParamStar<'a>), + Star(Box>), Param(Box>), } @@ -397,39 +396,38 @@ impl<'a> Inflate<'a> for RightParen<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum Expression<'a> { - Name(Name<'a>), - Ellipsis(Ellipsis<'a>), - Integer(Integer<'a>), - Float(Float<'a>), - Imaginary(Imaginary<'a>), - Comparison(Comparison<'a>), - UnaryOperation(UnaryOperation<'a>), - BinaryOperation(BinaryOperation<'a>), - BooleanOperation(BooleanOperation<'a>), - Attribute(Attribute<'a>), - Tuple(Tuple<'a>), - Call(Call<'a>), - GeneratorExp(GeneratorExp<'a>), - ListComp(ListComp<'a>), - SetComp(SetComp<'a>), - DictComp(DictComp<'a>), - List(List<'a>), - Set(Set<'a>), - Dict(Dict<'a>), - Subscript(Subscript<'a>), - StarredElement(StarredElement<'a>), - IfExp(IfExp<'a>), - Lambda(Lambda<'a>), - Yield(Yield<'a>), - Await(Await<'a>), - SimpleString(SimpleString<'a>), - ConcatenatedString(ConcatenatedString<'a>), - FormattedString(FormattedString<'a>), - NamedExpr(NamedExpr<'a>), + Name(Box>), + Ellipsis(Box>), + Integer(Box>), + Float(Box>), + Imaginary(Box>), + Comparison(Box>), + UnaryOperation(Box>), + BinaryOperation(Box>), + BooleanOperation(Box>), + Attribute(Box>), + Tuple(Box>), + Call(Box>), + GeneratorExp(Box>), + ListComp(Box>), + SetComp(Box>), + DictComp(Box>), + List(Box>), + Set(Box>), + Dict(Box>), + Subscript(Box>), + StarredElement(Box>), + IfExp(Box>), + Lambda(Box>), + Yield(Box>), + Await(Box>), + SimpleString(Box>), + ConcatenatedString(Box>), + FormattedString(Box>), + NamedExpr(Box>), } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] @@ -739,12 +737,11 @@ impl<'a> Codegen<'a> for Attribute<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum NameOrAttribute<'a> { - N(Name<'a>), - A(Attribute<'a>), + N(Box>), + A(Box>), } impl<'a> std::convert::From> for Expression<'a> { @@ -833,7 +830,7 @@ pub enum Element<'a> { value: Expression<'a>, comma: Option>, }, - Starred(StarredElement<'a>), + Starred(Box>), } impl<'a> Element<'a> { @@ -863,7 +860,7 @@ impl<'a> Element<'a> { pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result { Ok(match self { - Self::Starred(s) => Self::Starred(s.inflate_element(config, is_last)?), + Self::Starred(s) => Self::Starred(Box::new(s.inflate_element(config, is_last)?)), 
Self::Simple { value, comma } => Self::Simple { value: value.inflate(config)?, comma: if is_last { @@ -881,7 +878,10 @@ impl<'a> WithComma<'a> for Element<'a> { let comma = Some(comma); match self { Self::Simple { value, .. } => Self::Simple { comma, value }, - Self::Starred(s) => Self::Starred(StarredElement { comma, ..s }), + Self::Starred(mut s) => { + s.comma = comma; + Self::Starred(s) + } } } } @@ -1429,7 +1429,6 @@ impl<'a> Codegen<'a> for Dict<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone)] pub enum DictElement<'a> { Simple { @@ -1577,12 +1576,11 @@ impl<'a> Codegen<'a> for StarredDictElement<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum BaseSlice<'a> { - Index(Index<'a>), - Slice(Slice<'a>), + Index(Box>), + Slice(Box>), } #[derive(Debug, PartialEq, Eq, Clone)] @@ -1865,12 +1863,11 @@ impl<'a> Inflate<'a> for From<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum YieldValue<'a> { - Expression(Expression<'a>), - From(From<'a>), + Expression(Box>), + From(Box>), } impl<'a> Inflate<'a> for YieldValue<'a> { @@ -1972,7 +1969,6 @@ impl<'a> Codegen<'a> for Await<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum String<'a> { @@ -1984,9 +1980,9 @@ pub enum String<'a> { impl<'a> std::convert::From> for Expression<'a> { fn from(s: String<'a>) -> Self { match s { - String::Simple(s) => Self::SimpleString(s), - String::Concatenated(s) => Self::ConcatenatedString(s), - String::Formatted(s) => Self::FormattedString(s), + String::Simple(s) => Self::SimpleString(Box::new(s)), + String::Concatenated(s) => Self::ConcatenatedString(Box::new(s)), + String::Formatted(s) => Self::FormattedString(Box::new(s)), } } } @@ -2130,12 +2126,11 @@ impl<'a> Codegen<'a> for FormattedStringExpression<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum FormattedStringContent<'a> { Text(FormattedStringText<'a>), - Expression(FormattedStringExpression<'a>), + Expression(Box>), } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 93f540d0..a740b2c9 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -482,12 +482,12 @@ impl<'a> Inflate<'a> for AssignTarget<'a> { #[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum AssignTargetExpression<'a> { - Name(Name<'a>), - Attribute(Attribute<'a>), - StarredElement(StarredElement<'a>), - Tuple(Tuple<'a>), - List(List<'a>), - Subscript(Subscript<'a>), + Name(Box>), + Attribute(Box>), + StarredElement(Box>), + Tuple(Box>), + List(Box>), + Subscript(Box>), } #[derive(Debug, PartialEq, Eq, Clone)] @@ -2113,15 +2113,14 @@ impl<'a> Inflate<'a> for With<'a> { } } -#[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] #[cfg_attr(feature = "py", derive(IntoPy))] pub enum DelTargetExpression<'a> { - Name(Name<'a>), - Attribute(Attribute<'a>), - Tuple(Tuple<'a>), - List(List<'a>), - Subscript(Subscript<'a>), + Name(Box>), + Attribute(Box>), + Tuple(Box>), + List(Box>), 
+ Subscript(Box>), } impl<'a> std::convert::From> for Expression<'a> { diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index cc0511dd..8c1e5e34 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -158,8 +158,8 @@ parser! { rule assignment() -> SmallStatement<'a> = a:name() col:lit(":") ann:expression() rhs:(eq:lit("=") d:annotated_rhs() {(eq, d)})? { - SmallStatement::AnnAssign( - make_ann_assignment(AssignTargetExpression::Name(a), col, ann, rhs)) + SmallStatement::AnnAssign(make_ann_assignment( + AssignTargetExpression::Name(Box::new(a)), col, ann, rhs)) } // TODO: there's an extra '(' single_target ')' clause here in upstream / a:single_subscript_attribute_target() col:lit(":") ann:expression() @@ -268,7 +268,7 @@ parser! { rule import_from_as_name() -> ImportAlias<'a> = n:name() asname:(kw:lit("as") z:name() {(kw, z)})? { - make_import_alias(NameOrAttribute::N(n), asname) + make_import_alias(NameOrAttribute::N(Box::new(n)), asname) } rule dotted_as_names() -> Vec> @@ -383,7 +383,7 @@ parser! { add_param_star(a, star)))), b, kw) } / lit("*") c:comma() b:param_maybe_default()+ kw:kwds()? { - StarEtc(Some(StarArg::Star(ParamStar {comma:c })), b, kw) + StarEtc(Some(StarArg::Star(Box::new(ParamStar {comma:c }))), b, kw) } / kw:kwds() { StarEtc(None, vec![], Some(kw)) } @@ -549,8 +549,8 @@ parser! { rule subject_expr() -> Expression<'a> = first:star_named_expression() c:comma() rest:star_named_expressions()? { - Expression::Tuple( - make_tuple_from_elements(first.with_comma(c), rest.unwrap_or_default()) + Expression::Tuple(Box::new( + make_tuple_from_elements(first.with_comma(c), rest.unwrap_or_default())) ) } / named_expression() @@ -605,9 +605,9 @@ parser! { = val:signed_number() !(lit("+") / lit("-")) { val } / val:complex_number() { val } / val:strings() { val.into() } - / n:lit("None") { Expression::Name(make_name(n)) } - / n:lit("True") { Expression::Name(make_name(n)) } - / n:lit("False") { Expression::Name(make_name(n)) } + / n:lit("None") { Expression::Name(Box::new(make_name(n))) } + / n:lit("True") { Expression::Name(Box::new(make_name(n))) } + / n:lit("False") { Expression::Name(Box::new(make_name(n))) } rule complex_number() -> Expression<'a> = re:signed_real_number() op:(lit("+")/lit("-")) im:imaginary_number() {? @@ -654,9 +654,9 @@ parser! { #[cache_left_rec] rule name_or_attr() -> NameOrAttribute<'a> = val:name_or_attr() d:lit(".") attr:name() { - NameOrAttribute::A(make_attribute(val.into(), d, attr)) + NameOrAttribute::A(Box::new(make_attribute(val.into(), d, attr))) } - / n:name() { NameOrAttribute::N(n) } + / n:name() { NameOrAttribute::N(Box::new(n)) } rule group_pattern() -> MatchPattern<'a> = l:lpar() pat:pattern() r:rpar() { pat.with_parens(l, r) } @@ -760,33 +760,33 @@ parser! { rule _conditional_expression() -> Expression<'a> = body:disjunction() i:lit("if") test:disjunction() e:lit("else") oe:expression() { - Expression::IfExp(make_ifexp(body, i, test, e, oe)) + Expression::IfExp(Box::new(make_ifexp(body, i, test, e, oe))) } / disjunction() rule yield_expr() -> Expression<'a> = y:lit("yield") f:lit("from") a:expression() { - Expression::Yield(make_yield(y, Some(f), Some(a))) + Expression::Yield(Box::new(make_yield(y, Some(f), Some(a)))) } / y:lit("yield") a:star_expressions()? 
{ - Expression::Yield(make_yield(y, None, a)) + Expression::Yield(Box::new(make_yield(y, None, a))) } rule star_expressions() -> Expression<'a> = first:star_expression() rest:(comma:comma() e:star_expression() { (comma, expr_to_element(e)) })+ comma:comma()? { - Expression::Tuple(make_tuple(expr_to_element(first), rest, comma, None, None)) + Expression::Tuple(Box::new(make_tuple(expr_to_element(first), rest, comma, None, None))) } / e:star_expression() comma:comma() { - Expression::Tuple(make_tuple(expr_to_element(e), vec![], Some(comma), None, None)) + Expression::Tuple(Box::new(make_tuple(expr_to_element(e), vec![], Some(comma), None, None))) } / star_expression() #[cache] rule star_expression() -> Expression<'a> = star:lit("*") e:bitwise_or() { - Expression::StarredElement(make_starred_element(star, expr_to_element(e))) + Expression::StarredElement(Box::new(make_starred_element(star, expr_to_element(e)))) } / expression() @@ -797,13 +797,13 @@ parser! { rule star_named_expression() -> Element<'a> = star:lit("*") e:bitwise_or() { - Element::Starred(make_starred_element(star, expr_to_element(e))) + Element::Starred(Box::new(make_starred_element(star, expr_to_element(e)))) } / e:named_expression() { expr_to_element(e) } rule named_expression() -> Expression<'a> = a:name() op:lit(":=") b:expression() { - Expression::NamedExpr(make_named_expr(a, op, b)) + Expression::NamedExpr(Box::new(make_named_expr(a, op, b))) } / e:expression() !lit(":=") { e } @@ -947,23 +947,23 @@ parser! { rule await_primary() -> Expression<'a> = aw:tok(AWAIT, "AWAIT") e:primary() { - Expression::Await(make_await(aw, e)) + Expression::Await(Box::new(make_await(aw, e))) } / primary() #[cache_left_rec] rule primary() -> Expression<'a> = v:primary() dot:lit(".") attr:name() { - Expression::Attribute(make_attribute(v, dot, attr)) + Expression::Attribute(Box::new(make_attribute(v, dot, attr))) } / a:primary() b:genexp() { - Expression::Call(make_genexp_call(a, b)) + Expression::Call(Box::new(make_genexp_call(a, b))) } / f:primary() lpar:lit("(") arg:arguments()? rpar:lit(")") { - Expression::Call(make_call(f, lpar, arg.unwrap_or_default(), rpar)) + Expression::Call(Box::new(make_call(f, lpar, arg.unwrap_or_default(), rpar))) } / v:primary() lbrak:lbrak() s:slices() rbrak:rbrak() { - Expression::Subscript(make_subscript(v, lbrak, s, rbrak)) + Expression::Subscript(Box::new(make_subscript(v, lbrak, s, rbrak))) } / atom() @@ -981,16 +981,16 @@ parser! 
{ / v:expression() { make_index(v) } rule atom() -> Expression<'a> - = n:name() { Expression::Name(n) } - / n:lit("True") { Expression::Name(make_name(n)) } - / n:lit("False") { Expression::Name(make_name(n)) } - / n:lit("None") { Expression::Name(make_name(n)) } + = n:name() { Expression::Name(Box::new(n)) } + / n:lit("True") { Expression::Name(Box::new(make_name(n))) } + / n:lit("False") { Expression::Name(Box::new(make_name(n))) } + / n:lit("None") { Expression::Name(Box::new(make_name(n))) } / &(tok(STRING, "") / tok(FStringStart, "")) s:strings() {s.into()} / n:tok(Number, "NUMBER") { make_number(n) } - / &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(g)})) {e} + / &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(Box::new(g))})) {e} / &lit("[") e:(list() / listcomp()) {e} / &lit("{") e:(dict() / set() / dictcomp() / setcomp()) {e} - / lit("...") { Expression::Ellipsis(Ellipsis {lpar: vec![], rpar: vec![]})} + / lit("...") { Expression::Ellipsis(Box::new(Ellipsis {lpar: vec![], rpar: vec![]}))} rule group() -> Expression<'a> = lpar:lpar() e:(yield_expr() / named_expression()) rpar:rpar() { @@ -1001,7 +1001,7 @@ parser! { rule lambdef() -> Expression<'a> = kw:lit("lambda") p:lambda_params()? c:lit(":") b:expression() { - Expression::Lambda(make_lambda(kw, p.unwrap_or_default(), c, b)) + Expression::Lambda(Box::new(make_lambda(kw, p.unwrap_or_default(), c, b))) } rule lambda_params() -> Parameters<'a> @@ -1055,7 +1055,7 @@ parser! { )), b, kw) } / lit("*") c:comma() b:lambda_param_maybe_default()+ kw:lambda_kwds()? { - StarEtc(Some(StarArg::Star(ParamStar {comma: c})), b, kw) + StarEtc(Some(StarArg::Star(Box::new(ParamStar {comma: c}))), b, kw) } / kw:lambda_kwds() { StarEtc(None, vec![], Some(kw)) } @@ -1099,8 +1099,8 @@ parser! { rule list() -> Expression<'a> = lbrak:lbrak() e:star_named_expressions()? rbrak:rbrak() { - Expression::List( - make_list(lbrak, e.unwrap_or_default(), rbrak) + Expression::List(Box::new( + make_list(lbrak, e.unwrap_or_default(), rbrak)) ) } @@ -1108,25 +1108,25 @@ parser! { = lpar:lpar() first:star_named_expression() &lit(",") rest:(c:comma() e:star_named_expression() {(c, e)})* trailing_comma:comma()? rpar:rpar() { - Expression::Tuple( + Expression::Tuple(Box::new( make_tuple(first, rest, trailing_comma, Some(lpar), Some(rpar)) - ) + )) } / lpar:lpar() rpar:lit(")") { - Expression::Tuple(Tuple::default().with_parens( + Expression::Tuple(Box::new(Tuple::default().with_parens( lpar, RightParen { whitespace_before: Default::default(), rpar_tok: rpar } - ))} + )))} rule set() -> Expression<'a> = lbrace:lbrace() e:star_named_expressions()? rbrace:rbrace() { - Expression::Set(make_set(lbrace, e.unwrap_or_default(), rbrace)) + Expression::Set(Box::new(make_set(lbrace, e.unwrap_or_default(), rbrace))) } // Dicts rule dict() -> Expression<'a> = lbrace:lbrace() els:double_starred_keypairs()? rbrace:rbrace() { - Expression::Dict(make_dict(lbrace, els.unwrap_or_default(), rbrace)) + Expression::Dict(Box::new(make_dict(lbrace, els.unwrap_or_default(), rbrace))) } @@ -1166,12 +1166,12 @@ parser! 
{ rule listcomp() -> Expression<'a> = lbrak:lbrak() elt:named_expression() comp:for_if_clauses() rbrak:rbrak() { - Expression::ListComp(make_list_comp(lbrak, elt, comp, rbrak)) + Expression::ListComp(Box::new(make_list_comp(lbrak, elt, comp, rbrak))) } rule setcomp() -> Expression<'a> = l:lbrace() elt:named_expression() comp:for_if_clauses() r:rbrace() { - Expression::SetComp(make_set_comp(l, elt, comp, r)) + Expression::SetComp(Box::new(make_set_comp(l, elt, comp, r))) } rule genexp() -> GeneratorExp<'a> @@ -1186,7 +1186,7 @@ parser! { rule dictcomp() -> Expression<'a> = lbrace:lbrace() elt:kvpair() comp:for_if_clauses() rbrace:rbrace() { - Expression::DictComp(make_dict_comp(lbrace, elt, comp, rbrace)) + Expression::DictComp(Box::new(make_dict_comp(lbrace, elt, comp, rbrace))) } // Function call arguments @@ -1248,9 +1248,9 @@ parser! { rule star_targets() -> AssignTargetExpression<'a> = a:star_target() !lit(",") {a} / targets:separated_trailer(, ) { - AssignTargetExpression::Tuple( + AssignTargetExpression::Tuple(Box::new( make_tuple(targets.0, targets.1, targets.2, None, None) - ) + )) } rule star_targets_list_seq() -> Vec> @@ -1273,67 +1273,67 @@ parser! { #[cache] rule star_target() -> AssignTargetExpression<'a> = star:lit("*") !lit("*") t:star_target() { - AssignTargetExpression::StarredElement( + AssignTargetExpression::StarredElement(Box::new( make_starred_element(star, assign_target_to_element(t)) - ) + )) } / target_with_star_atom() #[cache] rule target_with_star_atom() -> AssignTargetExpression<'a> = a:t_primary() dot:lit(".") n:name() !t_lookahead() { - AssignTargetExpression::Attribute(make_attribute(a, dot, n)) + AssignTargetExpression::Attribute(Box::new(make_attribute(a, dot, n))) } / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { - AssignTargetExpression::Subscript( + AssignTargetExpression::Subscript(Box::new( make_subscript(a, lbrak, s, rbrak) - ) + )) } / a:star_atom() {a} rule star_atom() -> AssignTargetExpression<'a> - = a:name() { AssignTargetExpression::Name(a) } + = a:name() { AssignTargetExpression::Name(Box::new(a)) } / lpar:lpar() a:target_with_star_atom() rpar:rpar() { a.with_parens(lpar, rpar) } / lpar:lpar() a:star_targets_tuple_seq()? rpar:rpar() { - AssignTargetExpression::Tuple( + AssignTargetExpression::Tuple(Box::new( a.unwrap_or_default().with_parens(lpar, rpar) - ) + )) } / lbrak:lbrak() a:star_targets_list_seq()? 
rbrak:rbrak() { - AssignTargetExpression::List( + AssignTargetExpression::List(Box::new( make_list(lbrak, a.unwrap_or_default(), rbrak) - ) + )) } rule single_target() -> AssignTargetExpression<'a> = single_subscript_attribute_target() - / n:name() { AssignTargetExpression::Name(n) } + / n:name() { AssignTargetExpression::Name(Box::new(n)) } / lpar:lpar() t:single_target() rpar:rpar() { t.with_parens(lpar, rpar) } rule single_subscript_attribute_target() -> AssignTargetExpression<'a> = a:t_primary() dot:lit(".") n:name() !t_lookahead() { - AssignTargetExpression::Attribute(make_attribute(a, dot, n)) + AssignTargetExpression::Attribute(Box::new(make_attribute(a, dot, n))) } / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { - AssignTargetExpression::Subscript( + AssignTargetExpression::Subscript(Box::new( make_subscript(a, lbrak, s, rbrak) - ) + )) } #[cache_left_rec] rule t_primary() -> Expression<'a> = value:t_primary() dot:lit(".") attr:name() &t_lookahead() { - Expression::Attribute(make_attribute(value, dot, attr)) + Expression::Attribute(Box::new(make_attribute(value, dot, attr))) } / v:t_primary() l:lbrak() s:slices() r:rbrak() &t_lookahead() { - Expression::Subscript(make_subscript(v, l, s, r)) + Expression::Subscript(Box::new(make_subscript(v, l, s, r))) } / f:t_primary() gen:genexp() &t_lookahead() { - Expression::Call(make_genexp_call(f, gen)) + Expression::Call(Box::new(make_genexp_call(f, gen))) } / f:t_primary() lpar:lit("(") arg:arguments()? rpar:lit(")") &t_lookahead() { - Expression::Call(make_call(f, lpar, arg.unwrap_or_default(), rpar)) + Expression::Call(Box::new(make_call(f, lpar, arg.unwrap_or_default(), rpar))) } / a:atom() &t_lookahead() {a} @@ -1349,25 +1349,25 @@ parser! { rule del_target() -> DelTargetExpression<'a> = a:t_primary() d:lit(".") n:name() !t_lookahead() { - DelTargetExpression::Attribute(make_attribute(a, d, n)) + DelTargetExpression::Attribute(Box::new(make_attribute(a, d, n))) } / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { - DelTargetExpression::Subscript( + DelTargetExpression::Subscript(Box::new( make_subscript(a, lbrak, s, rbrak) - ) + )) } / del_t_atom() rule del_t_atom() -> DelTargetExpression<'a> - = n:name() { DelTargetExpression::Name(n) } + = n:name() { DelTargetExpression::Name(Box::new(n)) } / l:lpar() d:del_target() r:rpar() { d.with_parens(l, r) } / l:lpar() d:del_targets()? r:rpar() { make_del_tuple(Some(l), d.unwrap_or_default(), Some(r)) } / l:lbrak() d:del_targets()? r:rbrak() { - DelTargetExpression::List( + DelTargetExpression::List(Box::new( make_list(l, d.unwrap_or_default(), r) - ) + )) } // F-strings @@ -1389,13 +1389,13 @@ parser! { conv:(t:lit("!") c:_f_conversion() {(t,c)})? spec:(t:lit(":") s:_f_spec() {(t,s)})? 
rb:lit("}") { - FormattedStringContent::Expression( + FormattedStringContent::Expression(Box::new( make_fstring_expression(lb, e, eq, conv, spec, rb) - ) + )) } rule _f_expr() -> Expression<'a> - = (g:_bare_genexp() {Expression::GeneratorExp(g)}) + = (g:_bare_genexp() {Expression::GeneratorExp(Box::new(g))}) / star_expressions() / yield_expr() @@ -1542,12 +1542,12 @@ fn make_comparison<'a>( comparator: e, }); } - Expression::Comparison(Comparison { + Expression::Comparison(Box::new(Comparison { left: Box::new(head), comparisons, lpar: vec![], rpar: vec![], - }) + })) } fn make_comparison_operator(tok: TokenRef) -> Result { @@ -1635,13 +1635,13 @@ fn make_boolean_op<'a>( let mut expr = head; for (tok, right) in tail { - expr = Expression::BooleanOperation(BooleanOperation { + expr = Expression::BooleanOperation(Box::new(BooleanOperation { left: Box::new(expr), operator: make_boolean_operator(tok)?, right: Box::new(right), lpar: vec![], rpar: vec![], - }) + })) } Ok(expr) } @@ -1670,13 +1670,13 @@ fn make_binary_op<'a>( right: Expression<'a>, ) -> Result<'a, Expression<'a>> { let operator = make_binary_operator(op)?; - Ok(Expression::BinaryOperation(BinaryOperation { + Ok(Expression::BinaryOperation(Box::new(BinaryOperation { left: Box::new(left), operator, right: Box::new(right), lpar: vec![], rpar: vec![], - })) + }))) } fn make_binary_operator(tok: TokenRef) -> Result { @@ -1755,12 +1755,12 @@ fn make_binary_operator(tok: TokenRef) -> Result { fn make_unary_op<'a>(op: TokenRef<'a>, tail: Expression<'a>) -> Result<'a, Expression<'a>> { let operator = make_unary_operator(op)?; - Ok(Expression::UnaryOperation(UnaryOperation { + Ok(Expression::UnaryOperation(Box::new(UnaryOperation { operator, expression: Box::new(tail), lpar: vec![], rpar: vec![], - })) + }))) } fn make_unary_operator(tok: TokenRef) -> Result { @@ -1974,15 +1974,15 @@ fn make_name_or_attr<'a>( ) -> NameOrAttribute<'a> { if let Some((dot, name)) = tail.pop() { let dot = make_dot(dot); - return NameOrAttribute::A(Attribute { + return NameOrAttribute::A(Box::new(Attribute { attr: name, dot, lpar: Default::default(), rpar: Default::default(), value: Box::new(make_name_or_attr(first_tok, tail).into()), - }); + })); } else { - NameOrAttribute::N(first_tok) + NameOrAttribute::N(Box::new(first_tok)) } } @@ -2007,7 +2007,7 @@ fn make_import_alias<'a>( ) -> ImportAlias<'a> { ImportAlias { name, - asname: asname.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))), + asname: asname.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))), comma: None, } } @@ -2280,7 +2280,7 @@ fn make_genexp_call<'a>(func: Expression<'a>, mut genexp: GeneratorExp<'a>) -> C Call { func: Box::new(func), args: vec![Arg { - value: Expression::GeneratorExp(genexp), + value: Expression::GeneratorExp(Box::new(genexp)), keyword: None, equal: None, comma: None, @@ -2558,7 +2558,7 @@ fn make_double_starred_element<'a>( } fn make_index(value: Expression) -> BaseSlice { - BaseSlice::Index(Index { value }) + BaseSlice::Index(Box::new(Index { value })) } fn make_colon(tok: TokenRef) -> Colon { @@ -2583,13 +2583,13 @@ fn make_slice<'a>( } else { (None, None) }; - BaseSlice::Slice(Slice { + BaseSlice::Slice(Box::new(Slice { lower, upper, step, first_colon, second_colon, - }) + })) } fn make_slices<'a>( @@ -2717,8 +2717,8 @@ fn make_yield<'a>( ) -> Yield<'a> { let value = match (f, e) { (None, None) => None, - (Some(f), Some(e)) => Some(YieldValue::From(make_from(f, e))), - (None, Some(e)) => Some(YieldValue::Expression(e)), + (Some(f), 
Some(e)) => Some(YieldValue::From(Box::new(make_from(f, e)))), + (None, Some(e)) => Some(YieldValue::Expression(Box::new(e))), _ => panic!("yield from without expression"), }; Yield { @@ -3049,7 +3049,7 @@ fn make_except<'a>( body: Suite<'a>, ) -> ExceptHandler<'a> { // TODO: AsName should come from outside - let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))); + let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))); ExceptHandler { body, r#type: exp, @@ -3071,7 +3071,7 @@ fn make_except_star<'a>( body: Suite<'a>, ) -> ExceptStarHandler<'a> { // TODO: AsName should come from outside - let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))); + let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))); ExceptStarHandler { body, r#type: exp, @@ -3267,16 +3267,16 @@ fn make_del_tuple<'a>( elements: Vec>, rpar: Option>, ) -> DelTargetExpression<'a> { - DelTargetExpression::Tuple(Tuple { + DelTargetExpression::Tuple(Box::new(Tuple { elements, lpar: lpar.map(|x| vec![x]).unwrap_or_default(), rpar: rpar.map(|x| vec![x]).unwrap_or_default(), - }) + })) } fn make_named_expr<'a>(name: Name<'a>, tok: TokenRef<'a>, expr: Expression<'a>) -> NamedExpr<'a> { NamedExpr { - target: Box::new(Expression::Name(name)), + target: Box::new(Expression::Name(Box::new(name))), value: Box::new(expr), lpar: Default::default(), rpar: Default::default(), diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs index 19d12214..99a7d792 100644 --- a/native/libcst/src/parser/numbers.rs +++ b/native/libcst/src/parser/numbers.rs @@ -46,28 +46,28 @@ static IMAGINARY_RE: Lazy = Lazy::new(|| { pub(crate) fn parse_number(raw: &str) -> Expression { if INTEGER_RE.is_match(raw) { - Expression::Integer(Integer { + Expression::Integer(Box::new(Integer { value: raw, lpar: Default::default(), rpar: Default::default(), - }) + })) } else if FLOAT_RE.is_match(raw) { - Expression::Float(Float { + Expression::Float(Box::new(Float { value: raw, lpar: Default::default(), rpar: Default::default(), - }) + })) } else if IMAGINARY_RE.is_match(raw) { - Expression::Imaginary(Imaginary { + Expression::Imaginary(Box::new(Imaginary { value: raw, lpar: Default::default(), rpar: Default::default(), - }) + })) } else { - Expression::Integer(Integer { + Expression::Integer(Box::new(Integer { value: raw, lpar: Default::default(), rpar: Default::default(), - }) + })) } } diff --git a/native/libcst/tests/fixtures/big_binary_operator.py b/native/libcst/tests/fixtures/big_binary_operator.py new file mode 100644 index 00000000..2ab0d65c --- /dev/null +++ b/native/libcst/tests/fixtures/big_binary_operator.py @@ -0,0 +1,32 @@ +( # 350 binary operators lets go + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' 
+ 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' + + 'X' + 'Y' + 'Z' + 'Q' + 'T' +) From dba8296e4b89381a3575cd6cce56ff17177cc5cc Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 28 Jan 2022 10:34:23 +0000 Subject: [PATCH 209/632] [native] Return tuples instead of lists in CST (#631) --- native/libcst_derive/src/into_py.rs | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index 2ba4160c..3530b3d8 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -108,6 +108,8 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt let mut rust_varnames = vec![]; let mut optional_py_varnames = vec![]; let mut optional_rust_varnames = vec![]; + let mut vec_py_varnames = vec![]; + let mut vec_rust_varnames = vec![]; match &fields { Fields::Named(FieldsNamed { named, .. }) => { for field in named.iter() { @@ -138,12 +140,23 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt } } } + if let Type::Path(TypePath { path, .. }) = &field.ty { + if let Some(first) = path.segments.first() { + if first.ident == "Vec" { + vec_py_varnames.push(pyname); + vec_rust_varnames.push(rustname); + continue; + } + } + } py_varnames.push(pyname); rust_varnames.push(rustname); } } } - empty_kwargs = py_varnames.is_empty() && optional_py_varnames.is_empty(); + empty_kwargs = py_varnames.is_empty() + && optional_py_varnames.is_empty() + && vec_py_varnames.is_empty(); } Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => { if unnamed.first().is_some() { @@ -163,11 +176,22 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt let optional_pairs = quote! { #(#optional_rust_varnames.map(|x| (stringify!(#optional_py_varnames), x.into_py(py))),)* }; + let vec_pairs = quote! { + #(Some(( + stringify!(#vec_py_varnames), + pyo3::IntoPy::::into_py( + pyo3::types::PyTuple::new( + py, + #vec_rust_varnames.into_iter().map(|x| x.into_py(py)), + ), + py, + ))),)* + }; if empty_kwargs { quote! { pyo3::types::PyDict::new(py) } } else { quote! 
{ - [ #kwargs_pairs #optional_pairs ] + [ #kwargs_pairs #optional_pairs #vec_pairs ] .iter() .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) From e0744c7d143ac32be34439de23dae26b7c7e381c Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 28 Jan 2022 10:44:12 +0000 Subject: [PATCH 210/632] bump version to 0.4.1 --- CHANGELOG.md | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a54d67cd..1db26c2f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,34 @@ +# 0.4.1 - 2022-01-28 + +## New Contributors +* @ariebovenberg made their first contribution in https://github.com/Instagram/LibCST/pull/605 +* @sehz made their first contribution in https://github.com/Instagram/LibCST/pull/598 + +## Added +* Add docs about the native parts by @zsol in https://github.com/Instagram/LibCST/pull/601 +* Specify minimum rust toolchain version by @zsol in https://github.com/Instagram/LibCST/pull/614 +* build wheels on main branch for linux/arm64 by @zsol in https://github.com/Instagram/LibCST/pull/630 + +## Updated +* ApplyTypeAnnotationVisitor changes + * Add support for methods with func type comment excluding self/cls by @stroxler in https://github.com/Instagram/LibCST/pull/622 + * Merge in TypeVars and Generic base classes in ApplyTypeAnnotationVisitor by @martindemello in https://github.com/Instagram/LibCST/pull/596 + * add slots to base classes, @add_slots takes bases into account by @ariebovenberg in https://github.com/Instagram/LibCST/pull/605 + * Full handling for applying type comments to Assign by @stroxler in https://github.com/Instagram/LibCST/pull/599 + * Add support for For and With by @stroxler in https://github.com/Instagram/LibCST/pull/607 + * Support FunctionDef transformations by @stroxler in https://github.com/Instagram/LibCST/pull/610 +* change pyo3 as optional dependency in native Python Parser by @sehz in https://github.com/Instagram/LibCST/pull/598 +* [native] Box most enums by @zsol in https://github.com/Instagram/LibCST/pull/632 +* [native] Return tuples instead of lists in CST nodes by @zsol in https://github.com/Instagram/LibCST/pull/631 + +## Fixed +* Allow trailing whitespace without newline at EOF by @zsol in https://github.com/Instagram/LibCST/pull/611 +* Handle ast.parse failures when converting function type comments by @stroxler in https://github.com/Instagram/LibCST/pull/616 +* [native] Don't redundantly nest StarredElement inside another Element by @isidentical in https://github.com/Instagram/LibCST/pull/624 +* [native] Allow unparenthesized tuples inside f-strings by @isidentical in https://github.com/Instagram/LibCST/pull/621 +* Don't require whitespace right after match by @isidentical in https://github.com/Instagram/LibCST/pull/628 +* Proxy both parentheses in some pattern matching nodes by @isidentical in https://github.com/Instagram/LibCST/pull/626 + # 0.4.0 - 2022-01-12 This release contains a new parsing infrastructure that is turned off by default. 
You From c5b073599d9a732bd138b6933a6bf0209ac8e038 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 28 Jan 2022 11:22:49 +0000 Subject: [PATCH 211/632] fix typo in changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1db26c2f..e020592a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,11 +13,11 @@ * ApplyTypeAnnotationVisitor changes * Add support for methods with func type comment excluding self/cls by @stroxler in https://github.com/Instagram/LibCST/pull/622 * Merge in TypeVars and Generic base classes in ApplyTypeAnnotationVisitor by @martindemello in https://github.com/Instagram/LibCST/pull/596 - * add slots to base classes, @add_slots takes bases into account by @ariebovenberg in https://github.com/Instagram/LibCST/pull/605 * Full handling for applying type comments to Assign by @stroxler in https://github.com/Instagram/LibCST/pull/599 * Add support for For and With by @stroxler in https://github.com/Instagram/LibCST/pull/607 * Support FunctionDef transformations by @stroxler in https://github.com/Instagram/LibCST/pull/610 * change pyo3 as optional dependency in native Python Parser by @sehz in https://github.com/Instagram/LibCST/pull/598 +* add slots to base classes, @add_slots takes bases into account by @ariebovenberg in https://github.com/Instagram/LibCST/pull/605 * [native] Box most enums by @zsol in https://github.com/Instagram/LibCST/pull/632 * [native] Return tuples instead of lists in CST nodes by @zsol in https://github.com/Instagram/LibCST/pull/631 From c91655fbba7c27db05423673609f10203316b9c0 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 1 Feb 2022 11:13:17 +0000 Subject: [PATCH 212/632] fix copyright headers and add a script to check (#635) --- check_copyright.sh | 30 +++++++++++++++++++ libcst/_nodes/tests/test_match.py | 2 +- libcst/_parser/parso/python/token.py | 2 +- libcst/_parser/py_whitespace_parser.py | 2 +- libcst/_parser/types/py_config.py | 2 +- libcst/_parser/types/py_token.py | 2 +- libcst/_parser/types/py_whitespace_state.py | 2 +- .../visitors/_apply_type_annotations.py | 2 +- .../tests/test_apply_type_annotations.py | 2 +- native/libcst/Cargo.toml | 2 +- native/libcst/benches/parser_benchmark.rs | 2 +- native/libcst/src/bin.rs | 2 +- native/libcst/src/lib.rs | 2 +- native/libcst/src/nodes/codegen.rs | 2 +- native/libcst/src/nodes/expression.rs | 2 +- native/libcst/src/nodes/inflate_helpers.rs | 2 +- native/libcst/src/nodes/macros.rs | 2 +- native/libcst/src/nodes/mod.rs | 2 +- native/libcst/src/nodes/module.rs | 2 +- native/libcst/src/nodes/op.rs | 2 +- native/libcst/src/nodes/parser_config.rs | 2 +- native/libcst/src/nodes/py_cached.rs | 2 +- native/libcst/src/nodes/statement.rs | 2 +- native/libcst/src/nodes/test_utils.rs | 2 +- native/libcst/src/nodes/traits.rs | 2 +- native/libcst/src/nodes/whitespace.rs | 2 +- native/libcst/src/parser/errors.rs | 2 +- native/libcst/src/parser/grammar.rs | 2 +- native/libcst/src/parser/mod.rs | 2 +- native/libcst/src/parser/numbers.rs | 2 +- native/libcst/src/py.rs | 2 +- native/libcst/src/tokenizer/core/mod.rs | 2 +- .../libcst/src/tokenizer/core/string_types.rs | 2 +- native/libcst/src/tokenizer/debug_utils.rs | 2 +- native/libcst/src/tokenizer/mod.rs | 2 +- native/libcst/src/tokenizer/operators.rs | 2 +- native/libcst/src/tokenizer/tests.rs | 2 +- .../src/tokenizer/text_position/char_width.rs | 2 +- .../libcst/src/tokenizer/text_position/mod.rs | 2 +- .../libcst/src/tokenizer/whitespace_parser.rs | 2 +- 
native/libcst/tests/parser_roundtrip.rs | 2 +- native/libcst_derive/src/codegen.rs | 2 +- native/libcst_derive/src/inflate.rs | 2 +- native/libcst_derive/src/into_py.rs | 2 +- native/libcst_derive/src/lib.rs | 2 +- .../libcst_derive/src/parenthesized_node.rs | 2 +- native/roundtrip.sh | 5 ++++ stubs/libcst/native.pyi | 2 +- stubs/libcst_native/parser_config.pyi | 2 +- stubs/libcst_native/token_type.pyi | 2 +- stubs/libcst_native/tokenize.pyi | 2 +- stubs/libcst_native/whitespace_parser.pyi | 2 +- stubs/libcst_native/whitespace_state.pyi | 2 +- 53 files changed, 86 insertions(+), 51 deletions(-) create mode 100755 check_copyright.sh diff --git a/check_copyright.sh b/check_copyright.sh new file mode 100755 index 00000000..3f4eab75 --- /dev/null +++ b/check_copyright.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +set -eu +EXITCODE=0 +error() { echo "$1"; EXITCODE=1; } + +EXCEPTION_PATTERNS=( + "^native/libcst/tests/fixtures/" + "^libcst/_add_slots\.py$" + "^libcst/tests/test_\(e2e\|fuzz\)\.py$" + "^libcst/_parser/base_parser\.py$" + "^libcst/_parser/parso/utils\.py$" + "^libcst/_parser/parso/pgen2/\(generator\|grammar_parser\)\.py$" + "^libcst/_parser/parso/python/\(py_token\|tokenize\)\.py$" + "^libcst/_parser/parso/tests/test_\(fstring\|tokenize\|utils\)\.py$" +) + + +while read filename; do \ + if ! head -n 16 "$filename" | grep -q "Copyright (c) Meta Platforms, Inc. and affiliates."; then + error "Missing copyright in $filename" + fi +done < <( git ls-tree -r --name-only HEAD | grep "\(.py\|\.sh\|\.rs\)$" | \ + grep -v "${EXCEPTION_PATTERNS[@]/#/-e}" ) +exit $EXITCODE \ No newline at end of file diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py index a203ffef..2f1e4193 100644 --- a/libcst/_nodes/tests/test_match.py +++ b/libcst/_nodes/tests/test_match.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index de883719..ea357874 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/py_whitespace_parser.py b/libcst/_parser/py_whitespace_parser.py index a3c53a97..b1fd9b5e 100644 --- a/libcst/_parser/py_whitespace_parser.py +++ b/libcst/_parser/py_whitespace_parser.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_config.py b/libcst/_parser/types/py_config.py index cb91d558..d7732591 100644 --- a/libcst/_parser/types/py_config.py +++ b/libcst/_parser/types/py_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. 
# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_token.py b/libcst/_parser/types/py_token.py index 7fac5eb8..d2f9b537 100644 --- a/libcst/_parser/types/py_token.py +++ b/libcst/_parser/types/py_token.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_whitespace_state.py b/libcst/_parser/types/py_whitespace_state.py index 99047590..6359e83e 100644 --- a/libcst/_parser/types/py_whitespace_state.py +++ b/libcst/_parser/types/py_whitespace_state.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 24cd50a7..ac954d2f 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -1,4 +1,4 @@ -# Copyright (c) 2016-present, Meta Platforms, Inc. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 8689afbd..03e5ffbd 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1,4 +1,4 @@ -# Copyright (c) 2016-present, Meta Platforms, Inc. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index b0e111ee..d2916e17 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index 7a12ebb3..ad96d867 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/bin.rs b/native/libcst/src/bin.rs index 1b42859a..1517cce4 100644 --- a/native/libcst/src/bin.rs +++ b/native/libcst/src/bin.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. 
// // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index 997ac0bc..d2e475ab 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/codegen.rs b/native/libcst/src/nodes/codegen.rs index 99cc377a..1ebf8d80 100644 --- a/native/libcst/src/nodes/codegen.rs +++ b/native/libcst/src/nodes/codegen.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 29f77deb..570c55ca 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/inflate_helpers.rs b/native/libcst/src/nodes/inflate_helpers.rs index 262de5ac..8bf5c8af 100644 --- a/native/libcst/src/nodes/inflate_helpers.rs +++ b/native/libcst/src/nodes/inflate_helpers.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/nodes/macros.rs b/native/libcst/src/nodes/macros.rs index 76498b8e..c9ed49c1 100644 --- a/native/libcst/src/nodes/macros.rs +++ b/native/libcst/src/nodes/macros.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index b6be09df..69463d99 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index 21e7e66c..661c7bb7 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index afa97d5d..800f8ebd 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. 
and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs index acf140ee..6dfdf544 100644 --- a/native/libcst/src/nodes/parser_config.rs +++ b/native/libcst/src/nodes/parser_config.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/py_cached.rs b/native/libcst/src/nodes/py_cached.rs index 0a7fe691..d2398051 100644 --- a/native/libcst/src/nodes/py_cached.rs +++ b/native/libcst/src/nodes/py_cached.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index a740b2c9..d68ec055 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/test_utils.rs b/native/libcst/src/nodes/test_utils.rs index 109d471d..675b493d 100644 --- a/native/libcst/src/nodes/test_utils.rs +++ b/native/libcst/src/nodes/test_utils.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index b6ab115c..aabd27ef 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/nodes/whitespace.rs b/native/libcst/src/nodes/whitespace.rs index 13c4c8c4..59b314c2 100644 --- a/native/libcst/src/nodes/whitespace.rs +++ b/native/libcst/src/nodes/whitespace.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 6438ffbe..aae7fdb6 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. 
// // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 8c1e5e34..3aaf7130 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/parser/mod.rs b/native/libcst/src/parser/mod.rs index 8e6ec8e1..05937ab3 100644 --- a/native/libcst/src/parser/mod.rs +++ b/native/libcst/src/parser/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs index 99a7d792..286a33c7 100644 --- a/native/libcst/src/parser/numbers.rs +++ b/native/libcst/src/parser/numbers.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs index b938b691..4cc30fbe 100644 --- a/native/libcst/src/py.rs +++ b/native/libcst/src/py.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 0dd60944..9a869555 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -1,4 +1,4 @@ -// This implementation is Copyright (c) Meta Platforms, Inc. and its affiliates. +// This implementation is Copyright (c) Meta Platforms, Inc. and affiliates. // // CPython 3.10.0a5 and the original C code this is based on is // Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs index 27f0be36..8f9e0cf0 100644 --- a/native/libcst/src/tokenizer/core/string_types.rs +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -1,4 +1,4 @@ -// This implementation is Copyright (c) Meta Platforms, Inc. and its affiliates. +// This implementation is Copyright (c) Meta Platforms, Inc. and affiliates. // // CPython 3.10.0a5 and the original C code this is based on is // Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved diff --git a/native/libcst/src/tokenizer/debug_utils.rs b/native/libcst/src/tokenizer/debug_utils.rs index 543258a1..1e476a47 100644 --- a/native/libcst/src/tokenizer/debug_utils.rs +++ b/native/libcst/src/tokenizer/debug_utils.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
diff --git a/native/libcst/src/tokenizer/mod.rs b/native/libcst/src/tokenizer/mod.rs index 60f1a20d..9f7bbe2c 100644 --- a/native/libcst/src/tokenizer/mod.rs +++ b/native/libcst/src/tokenizer/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/operators.rs b/native/libcst/src/tokenizer/operators.rs index 94f70805..e5ef1526 100644 --- a/native/libcst/src/tokenizer/operators.rs +++ b/native/libcst/src/tokenizer/operators.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 2be070d1..300e6d29 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/text_position/char_width.rs b/native/libcst/src/tokenizer/text_position/char_width.rs index 879e1283..10c65a41 100644 --- a/native/libcst/src/tokenizer/text_position/char_width.rs +++ b/native/libcst/src/tokenizer/text_position/char_width.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs index f94d7311..9c394d52 100644 --- a/native/libcst/src/tokenizer/text_position/mod.rs +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs index ec463f99..f09ce789 100644 --- a/native/libcst/src/tokenizer/whitespace_parser.rs +++ b/native/libcst/src/tokenizer/whitespace_parser.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/tests/parser_roundtrip.rs b/native/libcst/tests/parser_roundtrip.rs index b16a46f0..7618eaec 100644 --- a/native/libcst/tests/parser_roundtrip.rs +++ b/native/libcst/tests/parser_roundtrip.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. 
// // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/src/codegen.rs b/native/libcst_derive/src/codegen.rs index 4fce05df..7dee8adc 100644 --- a/native/libcst_derive/src/codegen.rs +++ b/native/libcst_derive/src/codegen.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/src/inflate.rs b/native/libcst_derive/src/inflate.rs index 323160c1..e457b068 100644 --- a/native/libcst_derive/src/inflate.rs +++ b/native/libcst_derive/src/inflate.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index 3530b3d8..d37a92d3 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/src/lib.rs b/native/libcst_derive/src/lib.rs index 97d1e321..12ce4ff1 100644 --- a/native/libcst_derive/src/lib.rs +++ b/native/libcst_derive/src/lib.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/src/parenthesized_node.rs b/native/libcst_derive/src/parenthesized_node.rs index fe716510..9165dd1b 100644 --- a/native/libcst_derive/src/parenthesized_node.rs +++ b/native/libcst_derive/src/parenthesized_node.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and its affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/roundtrip.sh b/native/roundtrip.sh index 3f732143..c75241f7 100755 --- a/native/roundtrip.sh +++ b/native/roundtrip.sh @@ -1,5 +1,10 @@ #!/bin/bash +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + PARSE=$(dirname $0)/target/release/parse exec diff -u "$1" <($PARSE < "$1") diff --git a/stubs/libcst/native.pyi b/stubs/libcst/native.pyi index 4741266b..2a84d6a7 100644 --- a/stubs/libcst/native.pyi +++ b/stubs/libcst/native.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
diff --git a/stubs/libcst_native/parser_config.pyi b/stubs/libcst_native/parser_config.pyi index 412da05e..1a095cfc 100644 --- a/stubs/libcst_native/parser_config.pyi +++ b/stubs/libcst_native/parser_config.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/token_type.pyi b/stubs/libcst_native/token_type.pyi index 4dacfad7..b51e8b48 100644 --- a/stubs/libcst_native/token_type.pyi +++ b/stubs/libcst_native/token_type.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/tokenize.pyi b/stubs/libcst_native/tokenize.pyi index 91c6d2e9..12270da5 100644 --- a/stubs/libcst_native/tokenize.pyi +++ b/stubs/libcst_native/tokenize.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/whitespace_parser.pyi b/stubs/libcst_native/whitespace_parser.pyi index b2b51b54..7c60189b 100644 --- a/stubs/libcst_native/whitespace_parser.pyi +++ b/stubs/libcst_native/whitespace_parser.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/whitespace_state.pyi b/stubs/libcst_native/whitespace_state.pyi index 82db9527..75264a14 100644 --- a/stubs/libcst_native/whitespace_state.pyi +++ b/stubs/libcst_native/whitespace_state.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and its affiliates. +# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. From ecc535c3361fcb5d148bcd3b43ef3eb8db2655d3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 1 Feb 2022 12:46:56 +0000 Subject: [PATCH 213/632] Update readthedocs config (#637) * [readthedocs] bump python version * modernize readthedocs config --- .readthedocs.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index c76ca987..bf9cfe15 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,12 +5,16 @@ sphinx: formats: all +build: + os: ubuntu-20.04 + tools: + python: "3" + rust: "1.55" + python: - version: 3.7 install: - requirements: requirements.txt - requirements: requirements-dev.txt - method: pip path: . 
- system_packages: true From f0cca364b43ddd794407dba7499326ed15e29c3c Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 1 Feb 2022 14:12:18 +0000 Subject: [PATCH 214/632] Adopt Contributor Covenant CoC --- CODE_OF_CONDUCT.md | 81 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 78 insertions(+), 3 deletions(-) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 0f7ad8bf..83f431e8 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,5 +1,80 @@ # Code of Conduct -Facebook has adopted a Code of Conduct that we expect project participants to adhere to. -Please read the [full text](https://code.fb.com/codeofconduct/) -so that you can understand what actions will and will not be tolerated. +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic +address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a +professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when there is a +reasonable belief that an individual's behavior may have a negative impact on +the project or its community. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . 
All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq From fb56fa6b8fdb361ff9d8dd46a831a8d26497d524 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 7 Feb 2022 11:52:29 +0000 Subject: [PATCH 215/632] [native] Make IntoPy conversion fallible (#639) * Make IntoPy fallible * Simplify test case so it works on 3.6 --- libcst/_parser/tests/test_parse_errors.py | 9 + native/libcst/src/nodes/expression.rs | 193 ++++++++++------------ native/libcst/src/nodes/mod.rs | 2 +- native/libcst/src/nodes/module.rs | 4 +- native/libcst/src/nodes/op.rs | 26 +-- native/libcst/src/nodes/statement.rs | 128 +++++++------- native/libcst/src/nodes/traits.rs | 79 +++++++++ native/libcst/src/nodes/whitespace.rs | 16 +- native/libcst/src/py.rs | 7 +- native/libcst_derive/src/into_py.rs | 60 ++----- native/libcst_derive/src/lib.rs | 2 +- 11 files changed, 289 insertions(+), 237 deletions(-) diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index 331dd81c..f36d08e7 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -6,8 +6,10 @@ from textwrap import dedent from typing import Callable +from unittest.mock import patch import libcst as cst +from libcst._nodes.base import CSTValidationError from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider, UnitTest @@ -172,3 +174,10 @@ class ParseErrorsTest(UnitTest): parse_fn() if not is_native(): self.assertEqual(str(cm.exception), expected) + + def test_native_fallible_into_py(self) -> None: + with patch("libcst._nodes.expression.Name._validate") as await_validate: + await_validate.side_effect = CSTValidationError("validate is broken") + with self.assertRaises(Exception) as e: + cst.parse_module("foo") + self.assertIsInstance(e.exception, (SyntaxError, cst.ParserSyntaxError)) diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 570c55ca..9345b6c7 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -19,13 +19,13 @@ use crate::{ }, }; #[cfg(feature = "py")] -use libcst_derive::IntoPy; +use libcst_derive::TryIntoPy; use libcst_derive::{Codegen, Inflate, ParenthesizedNode}; type TokenRef<'a> = Rc>; #[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Parameters<'a> { pub params: Vec>, pub star_arg: Option>, @@ -59,7 +59,7 @@ impl<'a> Inflate<'a> for Parameters<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum StarArg<'a> { Star(Box>), 
Param(Box>), @@ -120,7 +120,7 @@ impl<'a> Codegen<'a> for Parameters<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ParamSlash<'a> { pub comma: Option>, } @@ -144,7 +144,7 @@ impl<'a> Inflate<'a> for ParamSlash<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ParamStar<'a> { pub comma: Comma<'a>, } @@ -164,7 +164,7 @@ impl<'a> Inflate<'a> for ParamStar<'a> { } #[derive(Debug, Eq, PartialEq, Default, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Name<'a> { pub value: &'a str, pub lpar: Vec>, @@ -188,7 +188,7 @@ impl<'a> Codegen<'a> for Name<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Param<'a> { pub name: Name<'a>, pub annotation: Option>, @@ -281,7 +281,7 @@ impl<'a> Param<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Arg<'a> { pub value: Expression<'a>, pub keyword: Option>, @@ -345,7 +345,7 @@ impl<'a> WithComma<'a> for Arg<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct LeftParen<'a> { /// Any space that appears directly after this left parenthesis. pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -371,7 +371,7 @@ impl<'a> Inflate<'a> for LeftParen<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct RightParen<'a> { /// Any space that appears directly before this right parenthesis. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -397,7 +397,7 @@ impl<'a> Inflate<'a> for RightParen<'a> { } #[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum Expression<'a> { Name(Box>), Ellipsis(Box>), @@ -431,7 +431,7 @@ pub enum Expression<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Ellipsis<'a> { pub lpar: Vec>, pub rpar: Vec>, @@ -453,7 +453,7 @@ impl<'a> Inflate<'a> for Ellipsis<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Integer<'a> { /// A string representation of the integer, such as ``"100000"`` or /// ``"100_000"``. @@ -479,7 +479,7 @@ impl<'a> Inflate<'a> for Integer<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Float<'a> { /// A string representation of the floating point number, such as ```"0.05"``, /// ``".050"``, or ``"5e-2"``. 
@@ -505,7 +505,7 @@ impl<'a> Inflate<'a> for Float<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Imaginary<'a> { /// A string representation of the complex number, such as ``"2j"`` pub value: &'a str, @@ -530,7 +530,7 @@ impl<'a> Inflate<'a> for Imaginary<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Comparison<'a> { pub left: Box>, pub comparisons: Vec>, @@ -559,7 +559,7 @@ impl<'a> Inflate<'a> for Comparison<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct UnaryOperation<'a> { pub operator: UnaryOp<'a>, pub expression: Box>, @@ -587,7 +587,7 @@ impl<'a> Inflate<'a> for UnaryOperation<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct BinaryOperation<'a> { pub left: Box>, pub operator: BinaryOp<'a>, @@ -618,7 +618,7 @@ impl<'a> Inflate<'a> for BinaryOperation<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct BooleanOperation<'a> { pub left: Box>, pub operator: BooleanOp<'a>, @@ -649,7 +649,7 @@ impl<'a> Inflate<'a> for BooleanOperation<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Call<'a> { pub func: Box>, pub args: Vec>, @@ -707,7 +707,7 @@ impl<'a> Codegen<'a> for Call<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Attribute<'a> { pub value: Box>, pub attr: Name<'a>, @@ -738,7 +738,7 @@ impl<'a> Codegen<'a> for Attribute<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum NameOrAttribute<'a> { N(Box>), A(Box>), @@ -754,7 +754,7 @@ impl<'a> std::convert::From> for Expression<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ComparisonTarget<'a> { pub operator: CompOp<'a>, pub comparator: Expression<'a>, @@ -776,7 +776,7 @@ impl<'a> Inflate<'a> for ComparisonTarget<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct StarredElement<'a> { pub value: Box>, pub comma: Option>, @@ -895,7 +895,7 @@ impl<'a> std::convert::From> for Element<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Tuple<'a> { pub elements: Vec>, pub lpar: Vec>, @@ -936,7 +936,7 @@ impl<'a> Codegen<'a> for Tuple<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct GeneratorExp<'a> { pub elt: Box>, pub for_in: Box>, @@ -964,7 +964,7 @@ impl<'a> Inflate<'a> for GeneratorExp<'a> { } #[derive(Debug, PartialEq, Eq, 
Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ListComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -998,7 +998,7 @@ impl<'a> Inflate<'a> for ListComp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct LeftSquareBracket<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1022,7 +1022,7 @@ impl<'a> Inflate<'a> for LeftSquareBracket<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct RightSquareBracket<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1046,7 +1046,7 @@ impl<'a> Inflate<'a> for RightSquareBracket<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct SetComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -1080,7 +1080,7 @@ impl<'a> Codegen<'a> for SetComp<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct DictComp<'a> { pub key: Box>, pub value: Box>, @@ -1132,7 +1132,7 @@ impl<'a> Codegen<'a> for DictComp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct LeftCurlyBrace<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1156,7 +1156,7 @@ impl<'a> Codegen<'a> for LeftCurlyBrace<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct RightCurlyBrace<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1180,7 +1180,7 @@ impl<'a> Codegen<'a> for RightCurlyBrace<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct CompFor<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1256,7 +1256,7 @@ impl<'a> Inflate<'a> for CompFor<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Asynchronous<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, } @@ -1269,7 +1269,7 @@ impl<'a> Codegen<'a> for Asynchronous<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct CompIf<'a> { pub test: Expression<'a>, pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -1303,7 +1303,7 @@ impl<'a> Inflate<'a> for CompIf<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct List<'a> { pub elements: Vec>, pub lbracket: LeftSquareBracket<'a>, @@ -1346,7 +1346,7 @@ impl<'a> Codegen<'a> for List<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Set<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1388,7 +1388,7 @@ impl<'a> Codegen<'a> for Set<'a> { } #[derive(Debug, PartialEq, Eq, Clone, 
ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Dict<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1540,7 +1540,7 @@ impl<'a> WithComma<'a> for DictElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct StarredDictElement<'a> { pub value: Expression<'a>, pub comma: Option>, @@ -1577,14 +1577,14 @@ impl<'a> Codegen<'a> for StarredDictElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum BaseSlice<'a> { Index(Box>), Slice(Box>), } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Index<'a> { pub value: Expression<'a>, } @@ -1603,7 +1603,7 @@ impl<'a> Codegen<'a> for Index<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Slice<'a> { #[cfg_attr(feature = "py", no_py_default)] pub lower: Option>, @@ -1646,7 +1646,7 @@ impl<'a> Codegen<'a> for Slice<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct SubscriptElement<'a> { pub slice: BaseSlice<'a>, pub comma: Option>, @@ -1670,7 +1670,7 @@ impl<'a> Codegen<'a> for SubscriptElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Subscript<'a> { pub value: Box>, pub slice: Vec>, @@ -1718,7 +1718,7 @@ impl<'a> Codegen<'a> for Subscript<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct IfExp<'a> { pub test: Box>, pub body: Box>, @@ -1778,7 +1778,7 @@ impl<'a> Codegen<'a> for IfExp<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Lambda<'a> { pub params: Box>, pub body: Box>, @@ -1826,7 +1826,7 @@ impl<'a> Codegen<'a> for Lambda<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct From<'a> { pub item: Expression<'a>, pub whitespace_before_from: Option>, @@ -1864,7 +1864,7 @@ impl<'a> Inflate<'a> for From<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum YieldValue<'a> { Expression(Box>), From(Box>), @@ -1893,7 +1893,7 @@ impl<'a> YieldValue<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Yield<'a> { pub value: Option>>, pub lpar: Vec>, @@ -1936,7 +1936,7 @@ impl<'a> Codegen<'a> for Yield<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Await<'a> { pub expression: Box>, pub lpar: Vec>, @@ -1970,7 +1970,7 @@ impl<'a> Codegen<'a> for Await<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature 
= "py", derive(TryIntoPy))] pub enum String<'a> { Simple(SimpleString<'a>), Concatenated(ConcatenatedString<'a>), @@ -1988,7 +1988,7 @@ impl<'a> std::convert::From> for Expression<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ConcatenatedString<'a> { pub left: Box>, pub right: Box>, @@ -2026,7 +2026,7 @@ impl<'a> Codegen<'a> for ConcatenatedString<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct SimpleString<'a> { /// The texual representation of the string, including quotes, prefix /// characters, and any escape characters present in the original source code, @@ -2051,7 +2051,7 @@ impl<'a> Codegen<'a> for SimpleString<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct FormattedStringText<'a> { pub value: &'a str, } @@ -2069,7 +2069,7 @@ impl<'a> Codegen<'a> for FormattedStringText<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct FormattedStringExpression<'a> { pub expression: Expression<'a>, pub conversion: Option<&'a str>, @@ -2127,14 +2127,14 @@ impl<'a> Codegen<'a> for FormattedStringExpression<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum FormattedStringContent<'a> { Text(FormattedStringText<'a>), Expression(Box>), } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct FormattedString<'a> { pub parts: Vec>, pub start: &'a str, @@ -2165,7 +2165,7 @@ impl<'a> Codegen<'a> for FormattedString<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct NamedExpr<'a> { pub target: Box>, pub value: Box>, @@ -2211,43 +2211,45 @@ impl<'a> Inflate<'a> for NamedExpr<'a> { #[cfg(feature = "py")] mod py { - use pyo3::{types::PyModule, IntoPy}; + use pyo3::types::PyModule; use super::*; - use crate::OrElse; + use crate::nodes::traits::py::TryIntoPy; // TODO: this could be a derive helper attribute to override the python class name - impl<'a> IntoPy for Element<'a> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + impl<'a> TryIntoPy for Element<'a> { + fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { match self { - Self::Starred(s) => s.into_py(py), + Self::Starred(s) => s.try_into_py(py), Self::Simple { value, comma } => { - let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let libcst = PyModule::import(py, "libcst")?; let kwargs = [ - Some(("value", value.into_py(py))), - comma.map(|x| ("comma", x.into_py(py))), + Some(("value", value.try_into_py(py)?)), + comma + .map(|x| x.try_into_py(py)) + .transpose()? + .map(|x| ("comma", x)), ] .iter() .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() .into_py_dict(py); - libcst + Ok(libcst .getattr("Element") .expect("no Element found in libcst") - .call((), Some(kwargs)) - .expect("conversion failed") - .into() + .call((), Some(kwargs))? 
+ .into()) } } } } // TODO: this could be a derive helper attribute to override the python class name - impl<'a> IntoPy for DictElement<'a> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + impl<'a> TryIntoPy for DictElement<'a> { + fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { match self { - Self::Starred(s) => s.into_py(py), + Self::Starred(s) => s.try_into_py(py), Self::Simple { key, value, @@ -2256,48 +2258,35 @@ mod py { whitespace_before_colon, .. } => { - let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let libcst = PyModule::import(py, "libcst")?; let kwargs = [ - Some(("key", key.into_py(py))), - Some(("value", value.into_py(py))), + Some(("key", key.try_into_py(py)?)), + Some(("value", value.try_into_py(py)?)), Some(( "whitespace_before_colon", - whitespace_before_colon.into_py(py), + whitespace_before_colon.try_into_py(py)?, )), - Some(("whitespace_after_colon", whitespace_after_colon.into_py(py))), - comma.map(|x| ("comma", x.into_py(py))), + Some(( + "whitespace_after_colon", + whitespace_after_colon.try_into_py(py)?, + )), + comma + .map(|x| x.try_into_py(py)) + .transpose()? + .map(|x| ("comma", x)), ] .iter() .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() .into_py_dict(py); - libcst + Ok(libcst .getattr("DictElement") .expect("no Element found in libcst") - .call((), Some(kwargs)) - .expect("conversion failed") - .into() + .call((), Some(kwargs))? + .into()) } } } } - - impl<'a> pyo3::conversion::IntoPy for Box> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - (*self).into_py(py) - } - } - - impl<'a> pyo3::conversion::IntoPy for Box> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - (*self).into_py(py) - } - } - - impl<'a> pyo3::conversion::IntoPy for Box> { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - (*self).into_py(py) - } - } } diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index 69463d99..58fa42d1 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -45,7 +45,7 @@ pub use module::Module; mod codegen; pub use codegen::{Codegen, CodegenState}; -mod traits; +pub(crate) mod traits; pub use traits::{Inflate, ParenthesizedNode, WithComma, WithLeadingLines}; pub(crate) mod inflate_helpers; diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index 661c7bb7..7bc42385 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -17,14 +17,14 @@ use crate::{ tokenizer::whitespace_parser::Config, }; #[cfg(feature = "py")] -use libcst_derive::IntoPy; +use libcst_derive::TryIntoPy; use super::traits::{Inflate, Result, WithLeadingLines}; type TokenRef<'a> = Rc>; #[derive(Debug, Eq, PartialEq)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Module<'a> { pub body: Vec>, pub header: Vec>, diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index 800f8ebd..d857e9a9 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -14,12 +14,12 @@ use crate::{ }, }; #[cfg(feature = "py")] -use libcst_derive::IntoPy; +use libcst_derive::TryIntoPy; type TokenRef<'a> = Rc>; #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Semicolon<'a> { /// Any space that appears directly before this semicolon. 
pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -51,7 +51,7 @@ impl<'a> Inflate<'a> for Semicolon<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Comma<'a> { /// Any space that appears directly before this comma. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -95,7 +95,7 @@ impl<'a> Comma<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct AssignEqual<'a> { /// Any space that appears directly before this equal sign. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -129,7 +129,7 @@ impl<'a> Inflate<'a> for AssignEqual<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Dot<'a> { /// Any space that appears directly before this dot. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -175,7 +175,7 @@ impl<'a> Dot<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ImportStar {} impl<'a> Codegen<'a> for ImportStar { @@ -191,7 +191,7 @@ impl<'a> Inflate<'a> for ImportStar { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum UnaryOp<'a> { Plus { whitespace_after: ParenthesizableWhitespace<'a>, @@ -284,7 +284,7 @@ impl<'a> Inflate<'a> for UnaryOp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum BooleanOp<'a> { And { whitespace_before: ParenthesizableWhitespace<'a>, @@ -358,7 +358,7 @@ impl<'a> Inflate<'a> for BooleanOp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum BinaryOp<'a> { Add { whitespace_before: ParenthesizableWhitespace<'a>, @@ -718,7 +718,7 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum CompOp<'a> { LessThan { whitespace_before: ParenthesizableWhitespace<'a>, @@ -1040,7 +1040,7 @@ impl<'a> Inflate<'a> for CompOp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Colon<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -1072,7 +1072,7 @@ impl<'a> Codegen<'a> for Colon<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum AugOp<'a> { AddAssign { whitespace_before: ParenthesizableWhitespace<'a>, @@ -1432,7 +1432,7 @@ impl<'a> Codegen<'a> for AugOp<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct BitOr<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index d68ec055..65c702f3 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -26,14 +26,14 @@ use crate::{ LeftCurlyBrace, LeftSquareBracket, 
RightCurlyBrace, RightSquareBracket, }; #[cfg(feature = "py")] -use libcst_derive::IntoPy; +use libcst_derive::TryIntoPy; use libcst_derive::{Codegen, Inflate, ParenthesizedNode}; type TokenRef<'a> = Rc>; #[allow(clippy::large_enum_variant)] #[derive(Debug, Eq, PartialEq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum Statement<'a> { Simple(SimpleStatementLine<'a>), Compound(CompoundStatement<'a>), @@ -49,7 +49,7 @@ impl<'a> WithLeadingLines<'a> for Statement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] #[allow(clippy::large_enum_variant)] pub enum CompoundStatement<'a> { FunctionDef(FunctionDef<'a>), @@ -80,14 +80,14 @@ impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum Suite<'a> { IndentedBlock(IndentedBlock<'a>), SimpleStatementSuite(SimpleStatementSuite<'a>), } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct IndentedBlock<'a> { /// Sequence of statements belonging to this indented block. pub body: Vec>, @@ -178,7 +178,7 @@ impl<'a> Inflate<'a> for IndentedBlock<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct SimpleStatementSuite<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. @@ -233,7 +233,7 @@ impl<'a> Codegen<'a> for SimpleStatementSuite<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct SimpleStatementLine<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. 
@@ -276,7 +276,7 @@ impl<'a> Inflate<'a> for SimpleStatementLine<'a> { #[allow(dead_code, clippy::large_enum_variant)] #[derive(Debug, Eq, PartialEq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum SmallStatement<'a> { Pass(Pass<'a>), Break(Break<'a>), @@ -318,7 +318,7 @@ impl<'a> SmallStatement<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Pass<'a> { pub semicolon: Option>, } @@ -341,7 +341,7 @@ impl<'a> Inflate<'a> for Pass<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Break<'a> { pub semicolon: Option>, } @@ -364,7 +364,7 @@ impl<'a> Inflate<'a> for Break<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Continue<'a> { pub semicolon: Option>, } @@ -387,7 +387,7 @@ impl<'a> Inflate<'a> for Continue<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Expr<'a> { pub value: Expression<'a>, pub semicolon: Option>, @@ -412,7 +412,7 @@ impl<'a> Inflate<'a> for Expr<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Assign<'a> { pub targets: Vec>, pub value: Expression<'a>, @@ -447,7 +447,7 @@ impl<'a> Assign<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct AssignTarget<'a> { pub target: AssignTargetExpression<'a>, pub whitespace_before_equal: SimpleWhitespace<'a>, @@ -480,7 +480,7 @@ impl<'a> Inflate<'a> for AssignTarget<'a> { #[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum AssignTargetExpression<'a> { Name(Box>), Attribute(Box>), @@ -491,7 +491,7 @@ pub enum AssignTargetExpression<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Import<'a> { pub names: Vec>, pub semicolon: Option>, @@ -535,7 +535,7 @@ impl<'a> Import<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ImportFrom<'a> { #[cfg_attr(feature = "py", no_py_default)] pub module: Option>, @@ -651,7 +651,7 @@ impl<'a> ImportFrom<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ImportAlias<'a> { pub name: NameOrAttribute<'a>, pub asname: Option>, @@ -687,7 +687,7 @@ impl<'a> Codegen<'a> for ImportAlias<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct AsName<'a> { pub name: AssignTargetExpression<'a>, pub whitespace_before_as: ParenthesizableWhitespace<'a>, @@ -721,7 +721,7 @@ impl<'a> Inflate<'a> for AsName<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum ImportNames<'a> { 
Star(ImportStar), Aliases(Vec>), @@ -744,7 +744,7 @@ impl<'a> Codegen<'a> for ImportNames<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct FunctionDef<'a> { pub name: Name<'a>, pub params: Parameters<'a>, @@ -870,7 +870,7 @@ impl<'a> Inflate<'a> for FunctionDef<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Decorator<'a> { pub decorator: Expression<'a>, pub leading_lines: Vec>, @@ -913,7 +913,7 @@ impl<'a> Inflate<'a> for Decorator<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct If<'a> { /// The expression that, when evaluated, should give us a truthy value pub test: Expression<'a>, @@ -983,14 +983,14 @@ impl<'a> Inflate<'a> for If<'a> { #[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum OrElse<'a> { Elif(If<'a>), Else(Else<'a>), } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Else<'a> { pub body: Suite<'a>, /// Sequence of empty lines appearing before this compound statement line. @@ -1034,7 +1034,7 @@ impl<'a> Inflate<'a> for Else<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Annotation<'a> { pub annotation: Expression<'a>, pub whitespace_before_indicator: Option>, @@ -1075,7 +1075,7 @@ impl<'a> Inflate<'a> for Annotation<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct AnnAssign<'a> { pub target: AssignTargetExpression<'a>, pub annotation: Annotation<'a>, @@ -1121,7 +1121,7 @@ impl<'a> AnnAssign<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Return<'a> { pub value: Option>, pub whitespace_after_return: Option>, @@ -1173,7 +1173,7 @@ impl<'a> Return<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Assert<'a> { pub test: Expression<'a>, pub msg: Option>, @@ -1225,7 +1225,7 @@ impl<'a> Assert<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Raise<'a> { pub exc: Option>, pub cause: Option>, @@ -1288,7 +1288,7 @@ impl<'a> Raise<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct NameItem<'a> { pub name: Name<'a>, pub comma: Option>, @@ -1314,7 +1314,7 @@ impl<'a> NameItem<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Global<'a> { pub names: Vec>, pub whitespace_after_global: SimpleWhitespace<'a>, @@ -1355,7 +1355,7 @@ impl<'a> Global<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Nonlocal<'a> { pub names: Vec>, pub whitespace_after_nonlocal: 
SimpleWhitespace<'a>, @@ -1396,7 +1396,7 @@ impl<'a> Nonlocal<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct For<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1489,7 +1489,7 @@ impl<'a> Inflate<'a> for For<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct While<'a> { pub test: Expression<'a>, pub body: Suite<'a>, @@ -1543,7 +1543,7 @@ impl<'a> Inflate<'a> for While<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ClassDef<'a> { pub name: Name<'a>, pub body: Suite<'a>, @@ -1650,7 +1650,7 @@ impl<'a> ClassDef<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Finally<'a> { pub body: Suite<'a>, pub leading_lines: Vec>, @@ -1691,7 +1691,7 @@ impl<'a> Inflate<'a> for Finally<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ExceptHandler<'a> { pub body: Suite<'a>, pub r#type: Option>, @@ -1752,7 +1752,7 @@ impl<'a> Inflate<'a> for ExceptHandler<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ExceptStarHandler<'a> { pub body: Suite<'a>, pub r#type: Expression<'a>, @@ -1815,7 +1815,7 @@ impl<'a> Inflate<'a> for ExceptStarHandler<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Try<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -1868,7 +1868,7 @@ impl<'a> Inflate<'a> for Try<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct TryStar<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -1921,7 +1921,7 @@ impl<'a> Inflate<'a> for TryStar<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct AugAssign<'a> { pub target: AssignTargetExpression<'a>, pub operator: AugOp<'a>, @@ -1958,7 +1958,7 @@ impl<'a> AugAssign<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct WithItem<'a> { pub item: Expression<'a>, pub asname: Option>, @@ -2000,7 +2000,7 @@ impl<'a> WithComma<'a> for WithItem<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct With<'a> { pub items: Vec>, pub body: Suite<'a>, @@ -2114,7 +2114,7 @@ impl<'a> Inflate<'a> for With<'a> { } #[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum DelTargetExpression<'a> { Name(Box>), Attribute(Box>), @@ -2144,7 +2144,7 @@ impl<'a> std::convert::From> for Element<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Del<'a> { pub target: DelTargetExpression<'a>, pub whitespace_after_del: SimpleWhitespace<'a>, @@ -2181,7 
+2181,7 @@ impl<'a> Del<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Match<'a> { pub subject: Expression<'a>, pub cases: Vec>, @@ -2257,7 +2257,7 @@ impl<'a> Inflate<'a> for Match<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchCase<'a> { pub pattern: MatchPattern<'a>, pub guard: Option>, @@ -2322,7 +2322,7 @@ impl<'a> Inflate<'a> for MatchCase<'a> { #[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum MatchPattern<'a> { Value(MatchValue<'a>), Singleton(MatchSingleton<'a>), @@ -2334,7 +2334,7 @@ pub enum MatchPattern<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchValue<'a> { pub value: Expression<'a>, } @@ -2373,7 +2373,7 @@ impl<'a> Inflate<'a> for MatchValue<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchSingleton<'a> { pub value: Name<'a>, } @@ -2413,14 +2413,14 @@ impl<'a> Inflate<'a> for MatchSingleton<'a> { #[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum MatchSequence<'a> { MatchList(MatchList<'a>), MatchTuple(MatchTuple<'a>), } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchList<'a> { pub patterns: Vec>, pub lbracket: Option>, @@ -2466,7 +2466,7 @@ impl<'a> Inflate<'a> for MatchList<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchTuple<'a> { pub patterns: Vec>, pub lpar: Vec>, @@ -2505,7 +2505,7 @@ impl<'a> Inflate<'a> for MatchTuple<'a> { #[allow(clippy::large_enum_variant)] #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum StarrableMatchSequenceElement<'a> { Simple(MatchSequenceElement<'a>), Starred(MatchStar<'a>), @@ -2541,7 +2541,7 @@ impl<'a> WithComma<'a> for StarrableMatchSequenceElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchSequenceElement<'a> { pub value: MatchPattern<'a>, pub comma: Option>, @@ -2582,7 +2582,7 @@ impl<'a> WithComma<'a> for MatchSequenceElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchStar<'a> { pub name: Option>, pub comma: Option>, @@ -2636,7 +2636,7 @@ impl<'a> WithComma<'a> for MatchStar<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchMapping<'a> { pub elements: Vec>, pub rest: Option>, @@ -2701,7 +2701,7 @@ impl<'a> Inflate<'a> for MatchMapping<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", 
derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchMappingElement<'a> { pub key: Expression<'a>, pub pattern: MatchPattern<'a>, @@ -2756,7 +2756,7 @@ impl<'a> WithComma<'a> for MatchMappingElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchClass<'a> { pub cls: NameOrAttribute<'a>, pub patterns: Vec>, @@ -2833,7 +2833,7 @@ impl<'a> Inflate<'a> for MatchClass<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchKeywordElement<'a> { pub key: Name<'a>, pub pattern: MatchPattern<'a>, @@ -2887,7 +2887,7 @@ impl<'a> WithComma<'a> for MatchKeywordElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchAs<'a> { pub pattern: Option>, pub name: Option>, @@ -2939,7 +2939,7 @@ impl<'a> Inflate<'a> for MatchAs<'a> { } #[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchOrElement<'a> { pub pattern: MatchPattern<'a>, pub separator: Option>, @@ -2964,7 +2964,7 @@ impl<'a> Inflate<'a> for MatchOrElement<'a> { } #[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct MatchOr<'a> { pub patterns: Vec>, pub lpar: Vec>, diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index aabd27ef..f8859641 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -84,3 +84,82 @@ impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { self.into_iter().map(|item| item.inflate(config)).collect() } } +#[cfg(feature = "py")] +pub mod py { + use pyo3::{types::PyTuple, AsPyPointer, IntoPy, PyObject, PyResult, Python}; + + // TODO: replace with upstream implementation once + // https://github.com/PyO3/pyo3/issues/1813 is resolved + pub trait TryIntoPy: Sized { + fn try_into_py(self, py: Python) -> PyResult; + } + + // I wish: + // impl> TryIntoPy for T { + // fn try_into_py(self, py: Python) -> PyResult { + // Ok(self.into_py(py)) + // } + // } + + impl TryIntoPy for bool { + fn try_into_py(self, py: Python) -> PyResult { + Ok(self.into_py(py)) + } + } + + impl> TryIntoPy for Box + where + T: TryIntoPy, + { + fn try_into_py(self, py: Python) -> PyResult { + (*self).try_into_py(py) + } + } + + impl TryIntoPy for Option + where + T: TryIntoPy, + { + fn try_into_py(self, py: Python) -> PyResult { + Ok(match self { + None => py.None(), + Some(x) => x.try_into_py(py)?, + }) + } + } + + impl TryIntoPy for Vec + where + T: TryIntoPy, + { + fn try_into_py(self, py: Python) -> PyResult { + let converted = self + .into_iter() + .map(|x| x.try_into_py(py)) + .collect::>>()? 
+ .into_iter(); + Ok(PyTuple::new(py, converted).into()) + } + } + + impl TryIntoPy for PyTuple { + fn try_into_py(self, py: Python) -> PyResult { + Ok(self.into_py(py)) + } + } + + impl<'a> TryIntoPy for &'a str { + fn try_into_py(self, py: Python) -> PyResult { + Ok(self.into_py(py)) + } + } + + impl TryIntoPy for &'_ T + where + T: AsPyPointer, + { + fn try_into_py(self, py: Python) -> PyResult { + Ok(self.into_py(py)) + } + } +} diff --git a/native/libcst/src/nodes/whitespace.rs b/native/libcst/src/nodes/whitespace.rs index 59b314c2..474ee384 100644 --- a/native/libcst/src/nodes/whitespace.rs +++ b/native/libcst/src/nodes/whitespace.rs @@ -4,12 +4,12 @@ // LICENSE file in the root directory of this source tree. #[cfg(feature = "py")] -use libcst_derive::IntoPy; +use libcst_derive::TryIntoPy; use super::{Codegen, CodegenState}; #[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct SimpleWhitespace<'a>(pub &'a str); impl<'a> Codegen<'a> for SimpleWhitespace<'a> { @@ -19,7 +19,7 @@ impl<'a> Codegen<'a> for SimpleWhitespace<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Comment<'a>(pub &'a str); impl<'a> Default for Comment<'a> { @@ -35,7 +35,7 @@ impl<'a> Codegen<'a> for Comment<'a> { } #[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct Newline<'a>(pub Option<&'a str>, pub Fakeness); #[derive(Debug, PartialEq, Eq, Clone)] @@ -64,7 +64,7 @@ impl<'a> Codegen<'a> for Newline<'a> { } #[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct TrailingWhitespace<'a> { pub whitespace: SimpleWhitespace<'a>, pub comment: Option>, @@ -82,7 +82,7 @@ impl<'a> Codegen<'a> for TrailingWhitespace<'a> { } #[derive(Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct EmptyLine<'a> { pub indent: bool, pub whitespace: SimpleWhitespace<'a>, @@ -131,7 +131,7 @@ impl<'a> EmptyLine<'a> { } #[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub struct ParenthesizedWhitespace<'a> { pub first_line: TrailingWhitespace<'a>, pub empty_lines: Vec>, @@ -153,7 +153,7 @@ impl<'a> Codegen<'a> for ParenthesizedWhitespace<'a> { } #[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(IntoPy))] +#[cfg_attr(feature = "py", derive(TryIntoPy))] pub enum ParenthesizableWhitespace<'a> { SimpleWhitespace(SimpleWhitespace<'a>), ParenthesizedWhitespace(ParenthesizedWhitespace<'a>), diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs index 4cc30fbe..960fac3a 100644 --- a/native/libcst/src/py.rs +++ b/native/libcst/src/py.rs @@ -3,6 +3,7 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree +use crate::nodes::traits::py::TryIntoPy; use pyo3::prelude::*; #[pymodule] @@ -11,19 +12,19 @@ pub fn libcst_native(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m)] fn parse_module(source: String, encoding: Option<&str>) -> PyResult { let m = crate::parse_module(source.as_str(), encoding)?; - Python::with_gil(|py| Ok(m.into_py(py))) + Python::with_gil(|py| m.try_into_py(py)) } 
#[pyfn(m)] fn parse_expression(source: String) -> PyResult { let expr = crate::parse_expression(source.as_str())?; - Python::with_gil(|py| Ok(expr.into_py(py))) + Python::with_gil(|py| expr.try_into_py(py)) } #[pyfn(m)] fn parse_statement(source: String) -> PyResult { let stm = crate::parse_statement(source.as_str())?; - Python::with_gil(|py| Ok(stm.into_py(py))) + Python::with_gil(|py| stm.try_into_py(py)) } Ok(()) diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index d37a92d3..e1d85132 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -38,14 +38,13 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { let kwargs_toks = fields_to_kwargs(&var.fields, true); toks.push(quote! { Self::#varname { #(#fieldnames,)* .. } => { - let libcst = pyo3::types::PyModule::import(py, "libcst").expect("libcst couldn't be imported"); + let libcst = pyo3::types::PyModule::import(py, "libcst")?; let kwargs = #kwargs_toks ; - libcst + Ok(libcst .getattr(stringify!(#varname)) .expect(stringify!(no #varname found in libcst)) - .call((), Some(kwargs)) - .expect(stringify!(conversion failed for #varname)) - .into() + .call((), Some(kwargs))? + .into()) } }) } @@ -58,7 +57,7 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { } Fields::Unnamed(_) => { toks.push(quote! { - Self::#varname(x, ..) => x.into_py(py), + Self::#varname(x, ..) => x.try_into_py(py), }); } } @@ -68,8 +67,8 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { let gen = quote! { use pyo3::types::IntoPyDict as _; #[automatically_derived] - impl#generics pyo3::conversion::IntoPy for #ident #generics { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + impl#generics crate::nodes::traits::py::TryIntoPy for #ident #generics { + fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { match self { #(#toks)* } @@ -86,16 +85,15 @@ fn impl_into_py_struct(ast: &DeriveInput, e: &DataStruct) -> TokenStream { let gen = quote! { use pyo3::types::IntoPyDict as _; #[automatically_derived] - impl#generics pyo3::conversion::IntoPy for #ident #generics { - fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { - let libcst = pyo3::types::PyModule::import(py, "libcst").expect("libcst couldn't be imported"); + impl#generics crate::nodes::traits::py::TryIntoPy for #ident #generics { + fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { + let libcst = pyo3::types::PyModule::import(py, "libcst")?; let kwargs = #kwargs_toks ; - libcst + Ok(libcst .getattr(stringify!(#ident)) .expect(stringify!(no #ident found in libcst)) - .call((), Some(kwargs)) - .expect(stringify!(conversion failed for #ident)) - .into() + .call((), Some(kwargs))? + .into()) } } }; @@ -108,8 +106,6 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt let mut rust_varnames = vec![]; let mut optional_py_varnames = vec![]; let mut optional_rust_varnames = vec![]; - let mut vec_py_varnames = vec![]; - let mut vec_rust_varnames = vec![]; match &fields { Fields::Named(FieldsNamed { named, .. }) => { for field in named.iter() { @@ -140,23 +136,12 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt } } } - if let Type::Path(TypePath { path, .. 
}) = &field.ty { - if let Some(first) = path.segments.first() { - if first.ident == "Vec" { - vec_py_varnames.push(pyname); - vec_rust_varnames.push(rustname); - continue; - } - } - } py_varnames.push(pyname); rust_varnames.push(rustname); } } } - empty_kwargs = py_varnames.is_empty() - && optional_py_varnames.is_empty() - && vec_py_varnames.is_empty(); + empty_kwargs = py_varnames.is_empty() && optional_py_varnames.is_empty() } Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => { if unnamed.first().is_some() { @@ -171,27 +156,16 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt } }; let kwargs_pairs = quote! { - #(Some((stringify!(#py_varnames), #rust_varnames.into_py(py))),)* + #(Some((stringify!(#py_varnames), #rust_varnames.try_into_py(py)?)),)* }; let optional_pairs = quote! { - #(#optional_rust_varnames.map(|x| (stringify!(#optional_py_varnames), x.into_py(py))),)* - }; - let vec_pairs = quote! { - #(Some(( - stringify!(#vec_py_varnames), - pyo3::IntoPy::::into_py( - pyo3::types::PyTuple::new( - py, - #vec_rust_varnames.into_iter().map(|x| x.into_py(py)), - ), - py, - ))),)* + #(#optional_rust_varnames.map(|x| x.try_into_py(py)).transpose()?.map(|x| (stringify!(#optional_py_varnames), x)),)* }; if empty_kwargs { quote! { pyo3::types::PyDict::new(py) } } else { quote! { - [ #kwargs_pairs #optional_pairs #vec_pairs ] + [ #kwargs_pairs #optional_pairs ] .iter() .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) diff --git a/native/libcst_derive/src/lib.rs b/native/libcst_derive/src/lib.rs index 12ce4ff1..3686eaa2 100644 --- a/native/libcst_derive/src/lib.rs +++ b/native/libcst_derive/src/lib.rs @@ -30,7 +30,7 @@ pub fn parenthesized_node_codegen(input: TokenStream) -> TokenStream { impl_codegen(&syn::parse(input).unwrap()) } -#[proc_macro_derive(IntoPy, attributes(skip_py, no_py_default))] +#[proc_macro_derive(TryIntoPy, attributes(skip_py, no_py_default))] pub fn into_py(input: TokenStream) -> TokenStream { impl_into_py(&syn::parse(input).unwrap()) } From 1aa40f799b9325c2f5b517c9b24b518588374d45 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 9 Feb 2022 17:06:12 -0800 Subject: [PATCH 216/632] Autoformat ApplyTypeAnnotationsVisitor (#642) I find it difficult sometimes to read method and function signatures when there are multiple arguments with type annotations - left-aligning the arguments makes it much easier for me to skim and see, using mostly my automatic visual resoning, - the argument names - the argument types - the return type Without this, I feel like I'm trying to run a parser in my head, which is not as fast and distracts me from code-skimming. 
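For illustration, the reshaping this produces looks roughly like the following (a minimal sketch adapted from the first hunk below, with the function body elided):

```
from typing import Sequence, Set

import libcst as cst


# Before: the whole signature sits on one line, so the parameter name, its
# annotation, and the return type all have to be parsed apart by eye.
def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[str]: ...


# After: the trailing comma makes black (or yapf) keep one parameter per line,
# left-aligning names, annotations, and the return type.
# (Redefining the same name is intentional here; the two defs are just the
# before/after shapes shown side by side.)
def _get_import_alias_names(
    import_aliases: Sequence[cst.ImportAlias],
) -> Set[str]: ...
```
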
This change was generated using thte new AddTrailingCommas codemod (which I'll put in a separate PR) via the command ``` python -m libcst.tool codemod add_trailing_commas.AddTrailingCommas ./libcst/codemod/visitors/_apply_type_annotations.py ``` Wait for CI - this is pure formatting, it should be very safe --- .../visitors/_apply_type_annotations.py | 240 ++++++++++++++---- 1 file changed, 191 insertions(+), 49 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index ac954d2f..5acc80a8 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -30,7 +30,9 @@ StarParamType = Union[ ] -def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[str]: +def _get_import_alias_names( + import_aliases: Sequence[cst.ImportAlias], +) -> Set[str]: import_names = set() for imported_name in import_aliases: asname = imported_name.asname @@ -41,7 +43,9 @@ def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[st return import_names -def _get_import_names(imports: Sequence[Union[cst.Import, cst.ImportFrom]]) -> Set[str]: +def _get_import_names( + imports: Sequence[Union[cst.Import, cst.ImportFrom]], +) -> Set[str]: import_names = set() for _import in imports: if isinstance(_import, cst.Import): @@ -53,17 +57,23 @@ def _get_import_names(imports: Sequence[Union[cst.Import, cst.ImportFrom]]) -> S return import_names -def _is_set(x: Union[None, cst.CSTNode, cst.MaybeSentinel]) -> bool: +def _is_set( + x: Union[None, cst.CSTNode, cst.MaybeSentinel], +) -> bool: return x is not None and x != cst.MaybeSentinel.DEFAULT -def _get_string_value(node: cst.SimpleString) -> str: +def _get_string_value( + node: cst.SimpleString, +) -> str: s = node.value c = s[-1] return s[s.index(c) : -1] -def _find_generic_base(node: cst.ClassDef) -> Optional[cst.Arg]: +def _find_generic_base( + node: cst.ClassDef, +) -> Optional[cst.Arg]: for b in node.bases: if m.matches(b.value, m.Subscript(value=m.Name("Generic"))): return b @@ -79,13 +89,24 @@ class FunctionKey: star_kwarg: bool @classmethod - def make(cls, name: str, params: cst.Parameters) -> "FunctionKey": + def make( + cls, + name: str, + params: cst.Parameters, + ) -> "FunctionKey": pos = len(params.params) kwonly = ",".join(sorted(x.name.value for x in params.kwonly_params)) posonly = len(params.posonly_params) star_arg = _is_set(params.star_arg) star_kwarg = _is_set(params.star_kwarg) - return cls(name, pos, kwonly, posonly, star_arg, star_kwarg) + return cls( + name, + pos, + kwonly, + posonly, + star_arg, + star_kwarg, + ) @dataclass(frozen=True) @@ -104,7 +125,11 @@ class TypeCollector(m.MatcherDecoratableVisitor): QualifiedNameProvider, ) - def __init__(self, existing_imports: Set[str], context: CodemodContext) -> None: + def __init__( + self, + existing_imports: Set[str], + context: CodemodContext, + ) -> None: super().__init__() # Qualifier for storing the canonical name of the current function. 
self.qualifier: List[str] = [] @@ -118,7 +143,10 @@ class TypeCollector(m.MatcherDecoratableVisitor): self.typevars: Dict[str, cst.Assign] = {} self.annotation_names: Set[str] = set() - def visit_ClassDef(self, node: cst.ClassDef) -> None: + def visit_ClassDef( + self, + node: cst.ClassDef, + ) -> None: self.qualifier.append(node.name.value) new_bases = [] for base in node.bases: @@ -138,10 +166,16 @@ class TypeCollector(m.MatcherDecoratableVisitor): self.class_definitions[node.name.value] = node.with_changes(bases=new_bases) - def leave_ClassDef(self, original_node: cst.ClassDef) -> None: + def leave_ClassDef( + self, + original_node: cst.ClassDef, + ) -> None: self.qualifier.pop() - def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: + def visit_FunctionDef( + self, + node: cst.FunctionDef, + ) -> bool: self.qualifier.append(node.name.value) returns = node.returns return_annotation = ( @@ -157,10 +191,16 @@ class TypeCollector(m.MatcherDecoratableVisitor): # pyi files don't support inner functions, return False to stop the traversal. return False - def leave_FunctionDef(self, original_node: cst.FunctionDef) -> None: + def leave_FunctionDef( + self, + original_node: cst.FunctionDef, + ) -> None: self.qualifier.pop() - def visit_AnnAssign(self, node: cst.AnnAssign) -> bool: + def visit_AnnAssign( + self, + node: cst.AnnAssign, + ) -> bool: name = get_full_name_for_node(node.target) if name is not None: self.qualifier.append(name) @@ -168,18 +208,30 @@ class TypeCollector(m.MatcherDecoratableVisitor): self.attribute_annotations[".".join(self.qualifier)] = annotation_value return True - def leave_AnnAssign(self, original_node: cst.AnnAssign) -> None: + def leave_AnnAssign( + self, + original_node: cst.AnnAssign, + ) -> None: self.qualifier.pop() - def visit_Assign(self, node: cst.Assign) -> None: + def visit_Assign( + self, + node: cst.Assign, + ) -> None: self.current_assign = node - def leave_Assign(self, original_node: cst.Assign) -> None: + def leave_Assign( + self, + original_node: cst.Assign, + ) -> None: self.current_assign = None @m.call_if_inside(m.Assign()) @m.visit(m.Call(func=m.Name("TypeVar"))) - def record_typevar(self, node: cst.Call) -> None: + def record_typevar( + self, + node: cst.Call, + ) -> None: # pyre-ignore current_assign is never None here name = get_full_name_for_node(self.current_assign.targets[0].target) if name: @@ -188,13 +240,19 @@ class TypeCollector(m.MatcherDecoratableVisitor): self._handle_qualification_and_should_qualify("typing.TypeVar") self.current_assign = None - def leave_Module(self, original_node: cst.Module) -> None: + def leave_Module( + self, + original_node: cst.Module, + ) -> None: # Filter out unused typevars self.typevars = { k: v for k, v in self.typevars.items() if k in self.annotation_names } - def _get_unique_qualified_name(self, node: cst.CSTNode) -> str: + def _get_unique_qualified_name( + self, + node: cst.CSTNode, + ) -> str: name = None names = [q.name for q in self.get_metadata(QualifiedNameProvider, node)] if len(names) == 0: @@ -221,7 +279,10 @@ class TypeCollector(m.MatcherDecoratableVisitor): dequalified_node = node.attr if isinstance(node, cst.Attribute) else node return qualified_name, dequalified_node - def _module_and_target(self, qualified_name: str) -> Tuple[str, str]: + def _module_and_target( + self, + qualified_name: str, + ) -> Tuple[str, str]: relative_prefix = "" while qualified_name.startswith("."): relative_prefix += "." 
@@ -233,7 +294,10 @@ class TypeCollector(m.MatcherDecoratableVisitor): qualifier, target = split return (relative_prefix + qualifier, target) - def _handle_qualification_and_should_qualify(self, qualified_name: str) -> bool: + def _handle_qualification_and_should_qualify( + self, + qualified_name: str, + ) -> bool: """ Based on a qualified name and the existing module imports, record that we need to add an import if necessary and return whether or not we @@ -248,7 +312,11 @@ class TypeCollector(m.MatcherDecoratableVisitor): elif module in self.existing_imports: return True else: - AddImportsVisitor.add_needed_import(self.context, module, target) + AddImportsVisitor.add_needed_import( + self.context, + module, + target, + ) return False return False @@ -274,7 +342,10 @@ class TypeCollector(m.MatcherDecoratableVisitor): else: return dequalified_node - def _handle_Index(self, slice: cst.Index) -> cst.Index: + def _handle_Index( + self, + slice: cst.Index, + ) -> cst.Index: value = slice.value if isinstance(value, cst.Subscript): return slice.with_changes(value=self._handle_Subscript(value)) @@ -285,7 +356,10 @@ class TypeCollector(m.MatcherDecoratableVisitor): self.annotation_names.add(_get_string_value(value)) return slice - def _handle_Subscript(self, node: cst.Subscript) -> cst.Subscript: + def _handle_Subscript( + self, + node: cst.Subscript, + ) -> cst.Subscript: value = node.value if isinstance(value, NAME_OR_ATTRIBUTE): new_node = node.with_changes(value=self._handle_NameOrAttribute(value)) @@ -320,7 +394,10 @@ class TypeCollector(m.MatcherDecoratableVisitor): else: return new_node - def _handle_Annotation(self, annotation: cst.Annotation) -> cst.Annotation: + def _handle_Annotation( + self, + annotation: cst.Annotation, + ) -> cst.Annotation: node = annotation.annotation if isinstance(node, cst.SimpleString): self.annotation_names.add(_get_string_value(node)) @@ -332,8 +409,13 @@ class TypeCollector(m.MatcherDecoratableVisitor): else: raise ValueError(f"Unexpected annotation node: {node}") - def _handle_Parameters(self, parameters: cst.Parameters) -> cst.Parameters: - def update_annotations(parameters: Sequence[cst.Param]) -> List[cst.Param]: + def _handle_Parameters( + self, + parameters: cst.Parameters, + ) -> cst.Parameters: + def update_annotations( + parameters: Sequence[cst.Param], + ) -> List[cst.Param]: updated_parameters = [] for parameter in list(parameters): annotation = parameter.annotation @@ -477,7 +559,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): strict_annotation_matching, ) - def transform_module_impl(self, tree: cst.Module) -> cst.Module: + def transform_module_impl( + self, + tree: cst.Module, + ) -> cst.Module: """ Collect type annotations from all stubs and apply them to ``tree``. 
@@ -520,7 +605,13 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): tree_with_imports = AddImportsVisitor( context=self.context, imports=( - [ImportItem("__future__", "annotations", None)] + [ + ImportItem( + "__future__", + "annotations", + None, + ) + ] if self.use_future_annotations else () ), @@ -545,7 +636,11 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.annotation_counts.global_annotations += 1 else: self.annotation_counts.attribute_annotations += 1 - return cst.AnnAssign(cst.Name(name), annotation, value) + return cst.AnnAssign( + cst.Name(name), + annotation, + value, + ) def _apply_annotation_to_parameter( self, @@ -571,7 +666,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return ".".join(self.qualifier) def _annotate_single_target( - self, node: cst.Assign, updated_node: cst.Assign + self, + node: cst.Assign, + updated_node: cst.Assign, ) -> Union[cst.Assign, cst.AnnAssign]: only_target = node.targets[0].target if isinstance(only_target, (cst.Tuple, cst.List)): @@ -604,7 +701,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return updated_node def _split_module( - self, module: cst.Module, updated_module: cst.Module + self, + module: cst.Module, + updated_module: cst.Module, ) -> Tuple[ List[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], List[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], @@ -627,7 +726,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): list(updated_module.body[import_add_location:]), ) - def _add_to_toplevel_annotations(self, name: str) -> None: + def _add_to_toplevel_annotations( + self, + name: str, + ) -> None: self.qualifier.append(name) if self._qualifier_name() in self.annotations.attribute_annotations: annotation = self.annotations.attribute_annotations[self._qualifier_name()] @@ -635,7 +737,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.qualifier.pop() def _update_parameters( - self, annotations: FunctionAnnotation, updated_node: cst.FunctionDef + self, + annotations: FunctionAnnotation, + updated_node: cst.FunctionDef, ) -> cst.Parameters: # Update params and default params with annotations # Don't override existing annotations or default values unless asked @@ -716,7 +820,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): """Check that function annotations on both signatures are compatible.""" def compatible( - p: Optional[cst.Annotation], q: Optional[cst.Annotation] + p: Optional[cst.Annotation], + q: Optional[cst.Annotation], ) -> bool: if self.overwrite_existing_annotations or not _is_set(p) or not _is_set(q): return True @@ -726,7 +831,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return True return p.annotation.deep_equals(q.annotation) # pyre-ignore[16] - def match_posargs(ps: Sequence[cst.Param], qs: Sequence[cst.Param]) -> bool: + def match_posargs( + ps: Sequence[cst.Param], + qs: Sequence[cst.Param], + ) -> bool: if len(ps) != len(qs): return False for p, q in zip(ps, qs): @@ -736,7 +844,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return False return True - def match_kwargs(ps: Sequence[cst.Param], qs: Sequence[cst.Param]) -> bool: + def match_kwargs( + ps: Sequence[cst.Param], + qs: Sequence[cst.Param], + ) -> bool: ps_dict = {x.name.value: x for x in ps} qs_dict = {x.name.value: x for x in qs} if set(ps_dict.keys()) != set(qs_dict.keys()): @@ -746,10 +857,16 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return False return True - def 
match_star(p: StarParamType, q: StarParamType) -> bool: + def match_star( + p: StarParamType, + q: StarParamType, + ) -> bool: return _is_set(p) == _is_set(q) - def match_params(f: cst.FunctionDef, g: FunctionAnnotation) -> bool: + def match_params( + f: cst.FunctionDef, + g: FunctionAnnotation, + ) -> bool: p, q = f.params, g.parameters return ( match_posargs(p.params, q.params) @@ -759,7 +876,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): and match_star(p.star_kwarg, q.star_kwarg) ) - def match_return(f: cst.FunctionDef, g: FunctionAnnotation) -> bool: + def match_return( + f: cst.FunctionDef, + g: FunctionAnnotation, + ) -> bool: return compatible(f.returns, g.returns) return match_params(function, annotations) and match_return( @@ -768,12 +888,17 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # transform API methods - def visit_ClassDef(self, node: cst.ClassDef) -> None: + def visit_ClassDef( + self, + node: cst.ClassDef, + ) -> None: self.qualifier.append(node.name.value) self.visited_classes.add(node.name.value) def leave_ClassDef( - self, original_node: cst.ClassDef, updated_node: cst.ClassDef + self, + original_node: cst.ClassDef, + updated_node: cst.ClassDef, ) -> cst.ClassDef: cls_name = ".".join(self.qualifier) self.qualifier.pop() @@ -787,13 +912,18 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return updated_node.with_changes(bases=new_bases) return updated_node - def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: + def visit_FunctionDef( + self, + node: cst.FunctionDef, + ) -> bool: self.qualifier.append(node.name.value) # pyi files don't support inner functions, return False to stop the traversal. return False def leave_FunctionDef( - self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef + self, + original_node: cst.FunctionDef, + updated_node: cst.FunctionDef, ) -> cst.FunctionDef: key = FunctionKey.make(self._qualifier_name(), updated_node.params) self.qualifier.pop() @@ -818,12 +948,18 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return updated_node.with_changes(params=new_parameters) return updated_node - def visit_Assign(self, node: cst.Assign) -> None: + def visit_Assign( + self, + node: cst.Assign, + ) -> None: self.current_assign = node @m.call_if_inside(m.Assign()) @m.visit(m.Call(func=m.Name("TypeVar"))) - def record_typevar(self, node: cst.Call) -> None: + def record_typevar( + self, + node: cst.Call, + ) -> None: # pyre-ignore current_assign is never None here name = get_full_name_for_node(self.current_assign.targets[0].target) if name: @@ -836,7 +972,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.current_assign = None def leave_Assign( - self, original_node: cst.Assign, updated_node: cst.Assign + self, + original_node: cst.Assign, + updated_node: cst.Assign, ) -> Union[cst.Assign, cst.AnnAssign]: self.current_assign = None @@ -855,13 +993,17 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return self._annotate_single_target(original_node, updated_node) def leave_ImportFrom( - self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom + self, + original_node: cst.ImportFrom, + updated_node: cst.ImportFrom, ) -> cst.ImportFrom: self.import_statements.append(original_node) return updated_node def leave_Module( - self, original_node: cst.Module, updated_node: cst.Module + self, + original_node: cst.Module, + updated_node: cst.Module, ) -> cst.Module: fresh_class_definitions = [ definition From 
3af6820ca7f29a5817e986b171a6f9840462c397 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Feb 2022 18:21:54 +0000 Subject: [PATCH 217/632] Fix space validation for AsName and Await (#641) * Fix space validation for AsName and Await * Update libcst/_nodes/tests/test_import.py Co-authored-by: Steven Troxler --- libcst/_nodes/expression.py | 7 +++++- libcst/_nodes/statement.py | 34 ++++++++++++++++++++++++------ libcst/_nodes/tests/test_await.py | 8 +++++++ libcst/_nodes/tests/test_import.py | 33 +++++++++++++++++++++++++++++ libcst/_nodes/tests/test_try.py | 24 +++++++++++++++------ 5 files changed, 93 insertions(+), 13 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 6b86a8b2..b65eb50c 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -2394,7 +2394,12 @@ class Await(BaseExpression): # Validate any super-class stuff, whatever it may be. super(Await, self)._validate() # Make sure we don't run identifiers together. - if self.whitespace_after_await.empty: + if ( + self.whitespace_after_await.empty + and not self.expression._safe_to_use_with_word_operator( + ExpressionPosition.RIGHT + ) + ): raise CSTValidationError("Must have at least one space after await") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Await": diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index e4b8d284..ee440573 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -750,8 +750,6 @@ class AsName(CSTNode): raise CSTValidationError( "There must be at least one space between 'as' and name." ) - if self.whitespace_before_as.empty: - raise CSTValidationError("There must be at least one space before 'as'.") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "AsName": return AsName( @@ -815,6 +813,16 @@ class ExceptHandler(CSTNode): raise CSTValidationError( "Must have at least one space after except when ExceptHandler has a type." ) + name = self.name + if ( + type_ is not None + and name is not None + and name.whitespace_before_as.empty + and not type_._safe_to_use_with_word_operator(ExpressionPosition.LEFT) + ): + raise CSTValidationError( + "Must have at least one space before as keyword in an except handler." + ) def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ExceptHandler": return ExceptHandler( @@ -1139,10 +1147,15 @@ class ImportAlias(CSTNode): def _validate(self) -> None: asname = self.asname - if asname is not None and not isinstance(asname.name, Name): - raise CSTValidationError( - "Must use a Name node for AsName name inside ImportAlias." - ) + if asname is not None: + if not isinstance(asname.name, Name): + raise CSTValidationError( + "Must use a Name node for AsName name inside ImportAlias." + ) + if asname.whitespace_before_as.empty: + raise CSTValidationError( + "Must have at least one space before as keyword in an ImportAlias." + ) try: self.evaluated_name except Exception as e: @@ -1986,6 +1999,15 @@ class WithItem(CSTNode): #: other items inside a with block must contain a comma to separate them. 
comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + def _validate(self) -> None: + asname = self.asname + if ( + asname is not None + and asname.whitespace_before_as.empty + and not self.item._safe_to_use_with_word_operator(ExpressionPosition.LEFT) + ): + raise CSTValidationError("Must have at least one space before as keyword.") + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "WithItem": return WithItem( item=visit_required(self, "item", self.item, visitor), diff --git a/libcst/_nodes/tests/test_await.py b/libcst/_nodes/tests/test_await.py index 8aa408a9..1d52642b 100644 --- a/libcst/_nodes/tests/test_await.py +++ b/libcst/_nodes/tests/test_await.py @@ -46,6 +46,14 @@ class AwaitTest(CSTNodeTest): ), "expected_position": CodeRange((1, 2), (1, 13)), }, + # Whitespace after await + { + "node": cst.Await( + cst.Name("foo", lpar=[cst.LeftParen()], rpar=[cst.RightParen()]), + whitespace_after_await=cst.SimpleWhitespace(""), + ), + "code": "await(foo)", + }, ) ) def test_valid_py37(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_import.py b/libcst/_nodes/tests/test_import.py index eedfcdaf..d6ad8cbe 100644 --- a/libcst/_nodes/tests/test_import.py +++ b/libcst/_nodes/tests/test_import.py @@ -195,6 +195,20 @@ class ImportCreateTest(CSTNodeTest): ), "expected_re": "at least one space", }, + { + "get_node": lambda: cst.Import( + names=( + cst.ImportAlias( + cst.Name("foo"), + asname=cst.AsName( + cst.Name("bar"), + whitespace_before_as=cst.SimpleWhitespace(""), + ), + ), + ), + ), + "expected_re": "at least one space", + }, { "get_node": lambda: cst.Import( names=[ @@ -564,6 +578,25 @@ class ImportFromCreateTest(CSTNodeTest): ), "expected_re": "one space after import", }, + { + "get_node": lambda: cst.ImportFrom( + module=cst.Name("foo"), + names=( + cst.ImportAlias( + cst.Name("bar"), + asname=cst.AsName( + cst.Name( + "baz", + lpar=(cst.LeftParen(),), + rpar=(cst.RightParen(),), + ), + whitespace_before_as=cst.SimpleWhitespace(""), + ), + ), + ), + ), + "expected_re": "one space before as keyword", + }, ) ) def test_invalid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index c04a9db2..a2e8a433 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -329,6 +329,24 @@ class TryTest(CSTNodeTest): "code": "try: pass\nexcept(IOError, ImportError): pass\n", "parser": parse_statement, }, + # No space before as + { + "node": cst.Try( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=[ + cst.ExceptHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_after_except=cst.SimpleWhitespace(" "), + type=cst.Call(cst.Name("foo")), + name=cst.AsName( + whitespace_before_as=cst.SimpleWhitespace(""), + name=cst.Name("bar"), + ), + ) + ], + ), + "code": "try: pass\nexcept foo()as bar: pass\n", + }, ) ) def test_valid(self, **kwargs: Any) -> None: @@ -346,12 +364,6 @@ class TryTest(CSTNodeTest): ), "expected_re": "between 'as'", }, - { - "get_node": lambda: cst.AsName( - cst.Name("bla"), whitespace_before_as=cst.SimpleWhitespace("") - ), - "expected_re": "before 'as'", - }, { "get_node": lambda: cst.ExceptHandler( cst.SimpleStatementSuite((cst.Pass(),)), From 0f42a7824bf3a1a2e42732d4520c3033afbb2ed1 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 10 Feb 2022 13:43:30 -0800 Subject: [PATCH 218/632] Apply Type Comments: Allow for skipping quotes when applying type comments (#644) * Allow for skipping quotes when applying type comments * Fix bad flag (tests 
don't check argparse, I ran it on pytorch) * Run ufmt --- .../codemod/commands/convert_type_comments.py | 85 +++++++++++++++---- .../tests/test_convert_type_comments.py | 41 ++++++++- 2 files changed, 106 insertions(+), 20 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index baccae52..5af00273 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -3,6 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import argparse import ast import builtins import dataclasses @@ -83,18 +84,27 @@ def _is_builtin(annotation: str) -> bool: return annotation in _builtins() -def _convert_annotation(raw: str) -> cst.Annotation: - # Convert annotation comments to string annotations to be safe, - # otherwise runtime errors would be common. - # - # Special-case builtins to reduce the amount of quoting noise. - # - # NOTE: we could potentially detect more cases for skipping quotes - # using ScopeProvider, which would make the output prettier. +def _convert_annotation( + raw: str, + quote_annotations: bool, +) -> cst.Annotation: + """ + Convert a raw annotation - which is a string coming from a type + comment - into a suitable libcst Annotation node. + + If `quote_annotations`, we'll always quote annotations unless they are builtin + types. The reason for this is to make the codemod safer to apply + on legacy code where type comments may well include invalid types + that would crash at runtime. + """ if _is_builtin(raw): return cst.Annotation(annotation=cst.Name(value=raw)) - else: - return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) + if not quote_annotations: + try: + return cst.Annotation(annotation=cst.parse_expression(raw)) + except cst.ParserSyntaxError: + pass + return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) def _is_type_comment(comment: Optional[cst.Comment]) -> bool: @@ -195,10 +205,14 @@ class AnnotationSpreader: def type_declaration( binding: cst.BaseAssignTargetExpression, raw_annotation: str, + quote_annotations: bool, ) -> cst.AnnAssign: return cst.AnnAssign( target=binding, - annotation=_convert_annotation(raw=raw_annotation), + annotation=_convert_annotation( + raw=raw_annotation, + quote_annotations=quote_annotations, + ), value=None, ) @@ -207,6 +221,7 @@ class AnnotationSpreader: bindings: UnpackedBindings, annotations: UnpackedAnnotations, leading_lines: Sequence[cst.EmptyLine], + quote_annotations: bool, ) -> List[cst.SimpleStatementLine]: return [ cst.SimpleStatementLine( @@ -214,6 +229,7 @@ class AnnotationSpreader: AnnotationSpreader.type_declaration( binding=binding, raw_annotation=raw_annotation, + quote_annotations=quote_annotations, ) ], leading_lines=leading_lines if i == 0 else [], @@ -230,6 +246,7 @@ class AnnotationSpreader: def convert_Assign( node: cst.Assign, annotation: ast.expr, + quote_annotations: bool, ) -> Union[ _FailedToApplyAnnotation, cst.AnnAssign, @@ -255,7 +272,10 @@ def convert_Assign( binding, raw_annotation = annotated_targets[0][0] return cst.AnnAssign( target=binding, - annotation=_convert_annotation(raw=raw_annotation), + annotation=_convert_annotation( + raw=raw_annotation, + quote_annotations=quote_annotations, + ), value=node.value, semicolon=node.semicolon, ) @@ -264,7 +284,11 @@ def convert_Assign( # on the LHS or multiple `=` tokens or both), we need to add a type # declaration per individual LHS target. 
type_declarations = [ - AnnotationSpreader.type_declaration(binding, raw_annotation) + AnnotationSpreader.type_declaration( + binding, + raw_annotation, + quote_annotations=quote_annotations, + ) for annotated_bindings in annotated_targets for binding, raw_annotation in annotated_bindings ] @@ -388,7 +412,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): - For parameters, we prefer inline type comments to function-level type comments if we find both. - We always apply the type comments as quoted annotations, unless + We always apply the type comments as quote_annotations annotations, unless we know that it refers to a builtin. We do not guarantee that the resulting string annotations would parse, but they should never cause failures at module import time. @@ -427,7 +451,22 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): function_body_stack: List[cst.BaseSuite] aggressively_strip_type_comments: bool - def __init__(self, context: CodemodContext) -> None: + @staticmethod + def add_args(arg_parser: argparse.ArgumentParser) -> None: + arg_parser.add_argument( + "--no-quote-annotations", + action="store_true", + help=( + "Add unquoted annotations. This leads to prettier code " + + "but possibly more errors if type comments are invalid." + ), + ) + + def __init__( + self, + context: CodemodContext, + no_quote_annotations: bool = False, + ) -> None: if (sys.version_info.major, sys.version_info.minor) < (3, 9): # The ast module did not get `unparse` until Python 3.9, # or `type_comments` until Python 3.8 @@ -444,6 +483,9 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): + "it is only libcst that needs a new Python version." ) super().__init__(context) + # flags used to control overall behavior + self.quote_annotations: bool = not no_quote_annotations + # state used to manage how we traverse nodes in various contexts self.function_type_info_stack = [] self.function_body_stack = [] self.aggressively_strip_type_comments = False @@ -480,6 +522,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): converted = convert_Assign( node=assign, annotation=annotation, + quote_annotations=self.quote_annotations, ) if isinstance(converted, _FailedToApplyAnnotation): # We were unable to consume the type comment, so return the @@ -556,6 +599,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): bindings=AnnotationSpreader.unpack_target(updated_node.target), annotations=AnnotationSpreader.unpack_annotation(annotation), leading_lines=updated_node.leading_lines, + quote_annotations=self.quote_annotations, ) except _ArityError: return updated_node @@ -606,6 +650,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): bindings=AnnotationSpreader.unpack_target(target), annotations=AnnotationSpreader.unpack_annotation(annotation), leading_lines=updated_node.leading_lines, + quote_annotations=self.quote_annotations, ) except _ArityError: return updated_node @@ -773,7 +818,10 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): raw_annotation = function_type_info.arguments.get(updated_node.name.value) if raw_annotation is not None: return updated_node.with_changes( - annotation=_convert_annotation(raw=raw_annotation) + annotation=_convert_annotation( + raw=raw_annotation, + quote_annotations=self.quote_annotations, + ) ) else: return updated_node @@ -787,7 +835,10 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): function_type_info = self.function_type_info_stack.pop() if updated_node.returns is None and function_type_info.returns is not None: return 
updated_node.with_changes( - returns=_convert_annotation(raw=function_type_info.returns) + returns=_convert_annotation( + raw=function_type_info.returns, + quote_annotations=self.quote_annotations, + ) ) else: return updated_node diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 6bd5a8a3..98eaa767 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. import sys +from typing import Any from libcst.codemod import CodemodTest from libcst.codemod.commands.convert_type_comments import ConvertTypeComments @@ -14,16 +15,16 @@ class TestConvertTypeCommentsBase(CodemodTest): maxDiff = 1500 TRANSFORM = ConvertTypeComments - def assertCodemod39Plus(self, before: str, after: str) -> None: + def assertCodemod39Plus(self, before: str, after: str, **kwargs: Any) -> None: """ Assert that the codemod works on Python 3.9+, and that we raise a NotImplementedError on other Python versions. """ if (sys.version_info.major, sys.version_info.minor) < (3, 9): with self.assertRaises(NotImplementedError): - super().assertCodemod(before, after) + super().assertCodemod(before, after, **kwargs) else: - super().assertCodemod(before, after) + super().assertCodemod(before, after, **kwargs) class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): @@ -436,3 +437,37 @@ class TestConvertTypeComments_FunctionDef(TestConvertTypeCommentsBase): """ after = before self.assertCodemod39Plus(before, after) + + def test_no_quoting(self) -> None: + before = """ + def f(x): + # type: (Foo) -> Foo + pass + w = x # type: Foo + y, z = x, x # type: (Foo, Foo) + return w + + with get_context() as context: # type: Context + pass + + for loop_var in the_iterable: # type: LoopType + pass + """ + after = """ + def f(x: Foo) -> Foo: + pass + w: Foo = x + y: Foo + z: Foo + y, z = x, x + return w + + context: Context + with get_context() as context: + pass + + loop_var: LoopType + for loop_var in the_iterable: + pass + """ + self.assertCodemod39Plus(before, after, no_quote_annotations=True) From f018d9924b805e13fe278c733febd22d15fd7b29 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 11 Feb 2022 06:26:33 -0800 Subject: [PATCH 219/632] Create an AddTrailingCommas codemod (#643) * Add ApplyTrailingCommas codemod This codemod adds trailing commas to parameter and arguments lists when there are sufficient arguments or parameters. The idea is this: - both black and yapf will generally split lines when there are trailing commas at the end of a parameter / arguments list - It's easier on my eye to have names and types in more predictable locations within a function header, i.e. left-aligned. And in function calls, I also find it easier to compare arguments to function parameters whenever the arguments are one-per line, at least when there are more than two arguments. By default, we ensure trailing commas for functions with one or more parameters (but do not include `self` or `cls` method arguments) which is suitable for `black`, and calls with 3 or more arguments. Both the parameter count and the argument count can be overridden. 
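For concreteness, here is a minimal sketch of the default (black-preset) behaviour, mirroring the unit tests added in this PR:

```
# Input: defines with at least one real parameter and calls with enough
# arguments gain a trailing comma; a method whose only parameter is `self`
# (or `cls`) and one-argument calls are left untouched.
def f(x, y, z):
    pass


class Foo:
    def __init__(self):
        pass


f(1, 2, 3)
print("just one argument")

# Output after running the codemod (before re-running an autoformatter):
#
#   def f(x, y, z,):
#       pass
#
#   class Foo:
#       def __init__(self):
#           pass
#
#   f(1, 2, 3,)
#   print("just one argument")
```
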
Moreover, by passing `--formatter yapf` someone can use the yapf-suitable default of 2 parameters which is handy since then the user doesn't have to memorize black vs yapf settings; this is necesary because yapf does not split lines after a trailing comma in one-argument defines. ``` > python -m unittest libcst.codemod.commands.tests.test_add_trailing_commas ...... ---------------------------------------------------------------------- Ran 6 tests in 0.134s OK ``` * Run ufmt, fix type error * Bump argument counts down to 2 --- .../codemod/commands/add_trailing_commas.py | 125 ++++++++++++++++++ .../tests/test_add_trailing_commas.py | 90 +++++++++++++ 2 files changed, 215 insertions(+) create mode 100644 libcst/codemod/commands/add_trailing_commas.py create mode 100644 libcst/codemod/commands/tests/test_add_trailing_commas.py diff --git a/libcst/codemod/commands/add_trailing_commas.py b/libcst/codemod/commands/add_trailing_commas.py new file mode 100644 index 00000000..45cbad7e --- /dev/null +++ b/libcst/codemod/commands/add_trailing_commas.py @@ -0,0 +1,125 @@ +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import argparse +import textwrap +from typing import Dict, Optional + +import libcst as cst +from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand + + +presets_per_formatter: Dict[str, Dict[str, int]] = { + "black": { + "parameter_count": 1, + "argument_count": 2, + }, + "yapf": { + "parameter_count": 2, + "argument_count": 2, + }, +} + + +class AddTrailingCommas(VisitorBasedCodemodCommand): + DESCRIPTION: str = textwrap.dedent( + """ + Codemod that adds trailing commas to arguments in function + headers and function calls. + + The idea is that both the black and yapf autoformatters will + tend to split headers and function calls so that there + is one parameter / argument per line if there is a trailing + comma: + - Black will always separate them by line + - Yapf appears to do so whenever there are at least two arguments + + Applying this codemod (and then an autoformatter) may make + it easier to read function definitions and calls + """ + ) + + def __init__( + self, + context: CodemodContext, + formatter: str = "black", + parameter_count: Optional[int] = None, + argument_count: Optional[int] = None, + ) -> None: + super().__init__(context) + presets = presets_per_formatter.get(formatter) + if presets is None: + raise ValueError( + f"Unknown formatter {formatter!r}. Presets exist for " + + ", ".join(presets_per_formatter.keys()) + ) + self.parameter_count: int = parameter_count or presets["parameter_count"] + self.argument_count: int = argument_count or presets["argument_count"] + + @staticmethod + def add_args(arg_parser: argparse.ArgumentParser) -> None: + arg_parser.add_argument( + "--formatter", + dest="formatter", + metavar="FORMATTER", + help="Formatter to target (e.g. 
yapf or black)", + type=str, + default="black", + ) + arg_parser.add_argument( + "--paramter-count", + dest="parameter_count", + metavar="PARAMETER_COUNT", + help="Minimal number of parameters for us to add trailing comma", + type=int, + default=None, + ) + arg_parser.add_argument( + "--argument-count", + dest="argument_count", + metavar="ARGUMENT_COUNT", + help="Minimal number of arguments for us to add trailing comma", + type=int, + default=None, + ) + + def leave_Parameters( + self, + original_node: cst.Parameters, + updated_node: cst.Parameters, + ) -> cst.Parameters: + skip = ( + # + self.parameter_count is None + or len(updated_node.params) < self.parameter_count + or ( + len(updated_node.params) == 1 + and updated_node.params[0].name.value in {"self", "cls"} + ) + ) + if skip: + return updated_node + else: + last_param = updated_node.params[-1] + return updated_node.with_changes( + params=( + *updated_node.params[:-1], + last_param.with_changes(comma=cst.Comma()), + ), + ) + + def leave_Call( + self, + original_node: cst.Call, + updated_node: cst.Call, + ) -> cst.Call: + if len(updated_node.args) < self.argument_count: + return updated_node + else: + last_arg = updated_node.args[-1] + return updated_node.with_changes( + args=( + *updated_node.args[:-1], + last_arg.with_changes(comma=cst.Comma()), + ), + ) diff --git a/libcst/codemod/commands/tests/test_add_trailing_commas.py b/libcst/codemod/commands/tests/test_add_trailing_commas.py new file mode 100644 index 00000000..1df31b69 --- /dev/null +++ b/libcst/codemod/commands/tests/test_add_trailing_commas.py @@ -0,0 +1,90 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# + +from libcst.codemod import CodemodTest +from libcst.codemod.commands.add_trailing_commas import AddTrailingCommas + + +class AddTrailingCommasTest(CodemodTest): + TRANSFORM = AddTrailingCommas + + def test_transform_defines(self) -> None: + before = """ + def f(x, y): + pass + + """ + after = """ + def f(x, y,): + pass + """ + self.assertCodemod(before, after) + + def test_skip_transforming_defines(self) -> None: + before = """ + # skip defines with no params. + def f0(): + pass + + # skip defines with a single param named `self`. 
+ class Foo: + def __init__(self): + pass + """ + after = before + self.assertCodemod(before, after) + + def test_transform_calls(self) -> None: + before = """ + f(a, b, c) + + g(x=a, y=b, z=c) + """ + after = """ + f(a, b, c,) + + g(x=a, y=b, z=c,) + """ + self.assertCodemod(before, after) + + def test_skip_transforming_calls(self) -> None: + before = """ + # skip empty calls + f() + + # skip calls with one argument + g(a) + g(x=a) + """ + after = before + self.assertCodemod(before, after) + + def test_using_yapf_presets(self) -> None: + before = """ + def f(x): # skip single parameters for yapf + pass + + def g(x, y): + pass + """ + after = """ + def f(x): # skip single parameters for yapf + pass + + def g(x, y,): + pass + """ + self.assertCodemod(before, after, formatter="yapf") + + def test_using_custom_presets(self) -> None: + before = """ + def f(x, y, z): + pass + + f(5, 6, 7) + """ + after = before + self.assertCodemod(before, after, parameter_count=4, argument_count=4) From a2e9c4a2764b834524a7416d38e0a12fefabb147 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Mon, 14 Feb 2022 10:10:44 -0800 Subject: [PATCH 220/632] Clean up ApplyTypeAnnotationsVisitor (#646) * Remove unneeded block * Improve function name, add docstring * Rename _is_set -> _is_non_sentinel * Add docstring for FunctionKey * Add class attributes with doc blocks to TypeCollector * Extract Annotations into a single abstraction, not two * Nits + fix flake8 --- .../visitors/_apply_type_annotations.py | 198 ++++++++++++------ 1 file changed, 130 insertions(+), 68 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 5acc80a8..471e0314 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree # -from dataclasses import dataclass, field +from dataclasses import dataclass from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst @@ -43,9 +43,17 @@ def _get_import_alias_names( return import_names -def _get_import_names( +def _get_imported_names( imports: Sequence[Union[cst.Import, cst.ImportFrom]], ) -> Set[str]: + """ + Given a series of import statements (both Import and ImportFrom), + determine all of the names that have been imported into the current + scope. For example: + - ``import foo.bar as bar, foo.baz`` produces ``{'bar', 'foo.baz'}`` + - ``from foo import (Bar, Baz as B)`` produces ``{'Bar', 'B'}`` + - ``from foo import *`` produces ``set()` because we cannot resolve names + """ import_names = set() for _import in imports: if isinstance(_import, cst.Import): @@ -57,7 +65,7 @@ def _get_import_names( return import_names -def _is_set( +def _is_non_sentinel( x: Union[None, cst.CSTNode, cst.MaybeSentinel], ) -> bool: return x is not None and x != cst.MaybeSentinel.DEFAULT @@ -81,6 +89,13 @@ def _find_generic_base( @dataclass(frozen=True) class FunctionKey: + """ + Class representing a funciton name and signature. + + This exists to ensure we do not attempt to apply stubs to functions whose + definition is incompatible. 
+ """ + name: str pos: int kwonly: str @@ -97,8 +112,8 @@ class FunctionKey: pos = len(params.params) kwonly = ",".join(sorted(x.name.value for x in params.kwonly_params)) posonly = len(params.posonly_params) - star_arg = _is_set(params.star_arg) - star_kwarg = _is_set(params.star_kwarg) + star_arg = _is_non_sentinel(params.star_arg) + star_kwarg = _is_non_sentinel(params.star_kwarg) return cls( name, pos, @@ -115,6 +130,63 @@ class FunctionAnnotation: returns: Optional[cst.Annotation] +@dataclass +class Annotations: + """ + Represents all of the annotation information we might add to + a class: + - All data is keyed on the qualified name relative to the module root + - The ``functions`` field also keys on the signature so that we + do not apply stub types where the signature is incompatible. + + The idea is that + - ``functions`` contains all function and method type + information from the stub, and the qualifier for a method includes + the containing class names (e.g. "Cat.meow") + - ``attributes`` similarly contains all globals + and class-level attribute type information. + - The ``class_definitions`` field contains all of the classes + defined in the stub. Most of these classes will be ignored in + downstream logic (it is *not* used to annotate attributes or + method), but there are some cases like TypedDict where a + typing-only class needs to be injected. + - The field ``typevars`` contains the assign statement for all + type variables in the stub, and ``names`` tracks + all of the names used in annotations; together these fields + tell us which typevars should be included in the codemod + (all typevars that appear in annotations.) + """ + + # TODO: consider simplifying this in a few ways: + # - We could probably just inject all typevars, used or not. + # It doesn't seem to me that our codemod needs to act like + # a linter checking for unused names. + # - We could probably decide which classes are typing-only + # in the visitor rather than the codemod, which would make + # it easier to reason locally about (and document) how the + # class_definitions field works. + + functions: Dict[FunctionKey, FunctionAnnotation] + attributes: Dict[str, cst.Annotation] + class_definitions: Dict[str, cst.ClassDef] + typevars: Dict[str, cst.Assign] + names: Set[str] + + @classmethod + def empty(cls) -> "Annotations": + return Annotations({}, {}, {}, {}, set()) + + def update(self, other: "Annotations") -> None: + self.functions.update(other.functions) + self.attributes.update(other.attributes) + self.class_definitions.update(other.class_definitions) + self.typevars.update(other.typevars) + self.names.update(other.names) + + def finish(self) -> None: + self.typevars = {k: v for k, v in self.typevars.items() if k in self.names} + + class TypeCollector(m.MatcherDecoratableVisitor): """ Collect type annotations from a stub module. @@ -125,23 +197,27 @@ class TypeCollector(m.MatcherDecoratableVisitor): QualifiedNameProvider, ) + annotations: Annotations + def __init__( self, existing_imports: Set[str], context: CodemodContext, ) -> None: super().__init__() - # Qualifier for storing the canonical name of the current function. - self.qualifier: List[str] = [] - # Store the annotations. 
- self.function_annotations: Dict[FunctionKey, FunctionAnnotation] = {} - self.attribute_annotations: Dict[str, cst.Annotation] = {} - self.existing_imports: Set[str] = existing_imports - self.class_definitions: Dict[str, cst.ClassDef] = {} self.context = context + # Existing imports, determined by looking at the target module. + # Used to help us determine when a type in a stub will require new imports. + # + # The contents of this are fully-qualified names of types in scope + # as well as module names, although downstream we effectively ignore + # the module names as of the current implementation. + self.existing_imports: Set[str] = existing_imports + # Fields that help us track temporary state as we recurse + self.qualifier: List[str] = [] self.current_assign: Optional[cst.Assign] = None # used to collect typevars - self.typevars: Dict[str, cst.Assign] = {} - self.annotation_names: Set[str] = set() + # Store the annotations. + self.annotations = Annotations.empty() def visit_ClassDef( self, @@ -164,7 +240,9 @@ class TypeCollector(m.MatcherDecoratableVisitor): ) new_bases.append(base.with_changes(value=new_value)) - self.class_definitions[node.name.value] = node.with_changes(bases=new_bases) + self.annotations.class_definitions[node.name.value] = node.with_changes( + bases=new_bases + ) def leave_ClassDef( self, @@ -184,7 +262,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): parameter_annotations = self._handle_Parameters(node.params) name = ".".join(self.qualifier) key = FunctionKey.make(name, node.params) - self.function_annotations[key] = FunctionAnnotation( + self.annotations.functions[key] = FunctionAnnotation( parameters=parameter_annotations, returns=return_annotation ) @@ -205,7 +283,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): if name is not None: self.qualifier.append(name) annotation_value = self._handle_Annotation(annotation=node.annotation) - self.attribute_annotations[".".join(self.qualifier)] = annotation_value + self.annotations.attributes[".".join(self.qualifier)] = annotation_value return True def leave_AnnAssign( @@ -236,7 +314,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): name = get_full_name_for_node(self.current_assign.targets[0].target) if name: # pyre-ignore current_assign is never None here - self.typevars[name] = self.current_assign + self.annotations.typevars[name] = self.current_assign self._handle_qualification_and_should_qualify("typing.TypeVar") self.current_assign = None @@ -244,10 +322,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): self, original_node: cst.Module, ) -> None: - # Filter out unused typevars - self.typevars = { - k: v for k, v in self.typevars.items() if k in self.annotation_names - } + self.annotations.finish() def _get_unique_qualified_name( self, @@ -307,9 +382,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): if module in ("", "builtins"): return False elif qualified_name not in self.existing_imports: - if module == "builtins": - return False - elif module in self.existing_imports: + if module in self.existing_imports: return True else: AddImportsVisitor.add_needed_import( @@ -336,7 +409,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): dequalified_node, ) = self._get_qualified_name_and_dequalified_node(node) should_qualify = self._handle_qualification_and_should_qualify(qualified_name) - self.annotation_names.add(qualified_name) + self.annotations.names.add(qualified_name) if should_qualify: return node else: @@ -353,7 +426,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): return 
slice.with_changes(value=self._handle_NameOrAttribute(value)) else: if isinstance(value, cst.SimpleString): - self.annotation_names.add(_get_string_value(value)) + self.annotations.names.add(_get_string_value(value)) return slice def _handle_Subscript( @@ -400,7 +473,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): ) -> cst.Annotation: node = annotation.annotation if isinstance(node, cst.SimpleString): - self.annotation_names.add(_get_string_value(node)) + self.annotations.names.add(_get_string_value(node)) return annotation elif isinstance(node, cst.Subscript): return cst.Annotation(annotation=self._handle_Subscript(node)) @@ -429,16 +502,6 @@ class TypeCollector(m.MatcherDecoratableVisitor): return parameters.with_changes(params=update_annotations(parameters.params)) -@dataclass(frozen=True) -class Annotations: - function_annotations: Dict[FunctionKey, FunctionAnnotation] = field( - default_factory=dict - ) - attribute_annotations: Dict[str, cst.Annotation] = field(default_factory=dict) - class_definitions: Dict[str, cst.ClassDef] = field(default_factory=dict) - typevars: Dict[str, cst.Assign] = field(default_factory=dict) - - @dataclass class AnnotationCounts: global_annotations: int = 0 @@ -509,7 +572,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # Qualifier for storing the canonical name of the current function. self.qualifier: List[str] = [] self.annotations: Annotations = ( - Annotations() if annotations is None else annotations + Annotations.empty() if annotations is None else annotations ) self.toplevel_annotations: Dict[str, cst.Annotation] = {} self.visited_classes: Set[str] = set() @@ -570,7 +633,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): """ import_gatherer = GatherImportsVisitor(CodemodContext()) tree.visit(import_gatherer) - existing_import_names = _get_import_names(import_gatherer.all_imports) + existing_import_names = _get_imported_names(import_gatherer.all_imports) context_contents = self.context.scratch.get( ApplyTypeAnnotationsVisitor.CONTEXT_KEY @@ -597,25 +660,22 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) visitor = TypeCollector(existing_import_names, self.context) cst.MetadataWrapper(stub).visit(visitor) - self.annotations.function_annotations.update(visitor.function_annotations) - self.annotations.attribute_annotations.update(visitor.attribute_annotations) - self.annotations.class_definitions.update(visitor.class_definitions) - self.annotations.typevars.update(visitor.typevars) + self.annotations.update(visitor.annotations) - tree_with_imports = AddImportsVisitor( - context=self.context, - imports=( - [ - ImportItem( - "__future__", - "annotations", - None, - ) - ] - if self.use_future_annotations - else () - ), - ).transform_module(tree) + tree_with_imports = AddImportsVisitor( + context=self.context, + imports=( + [ + ImportItem( + "__future__", + "annotations", + None, + ) + ] + if self.use_future_annotations + else () + ), + ).transform_module(tree) tree_with_changes = tree_with_imports.visit(self) # don't modify the imports if we didn't actually add any type information @@ -684,12 +744,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): if name is not None: self.qualifier.append(name) if ( - self._qualifier_name() in self.annotations.attribute_annotations + self._qualifier_name() in self.annotations.attributes and not isinstance(only_target, cst.Subscript) ): - annotation = self.annotations.attribute_annotations[ - self._qualifier_name() - ] + annotation = 
self.annotations.attributes[self._qualifier_name()] self.qualifier.pop() return self._apply_annotation_to_attribute_or_global( name=name, @@ -731,8 +789,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): name: str, ) -> None: self.qualifier.append(name) - if self._qualifier_name() in self.annotations.attribute_annotations: - annotation = self.annotations.attribute_annotations[self._qualifier_name()] + if self._qualifier_name() in self.annotations.attributes: + annotation = self.annotations.attributes[self._qualifier_name()] self.toplevel_annotations[name] = annotation self.qualifier.pop() @@ -823,7 +881,11 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): p: Optional[cst.Annotation], q: Optional[cst.Annotation], ) -> bool: - if self.overwrite_existing_annotations or not _is_set(p) or not _is_set(q): + if ( + self.overwrite_existing_annotations + or not _is_non_sentinel(p) + or not _is_non_sentinel(q) + ): return True if not self.strict_annotation_matching: # We will not overwrite clashing annotations, but the signature as a @@ -861,7 +923,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): p: StarParamType, q: StarParamType, ) -> bool: - return _is_set(p) == _is_set(q) + return _is_non_sentinel(p) == _is_non_sentinel(q) def match_params( f: cst.FunctionDef, @@ -927,8 +989,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) -> cst.FunctionDef: key = FunctionKey.make(self._qualifier_name(), updated_node.params) self.qualifier.pop() - if key in self.annotations.function_annotations: - function_annotation = self.annotations.function_annotations[key] + if key in self.annotations.functions: + function_annotation = self.annotations.functions[key] # Only add new annotation if: # * we have matching function signatures and # * we are explicitly told to overwrite existing annotations or From 0eb839d4f933c231195e030cb8c2983d644fa7a6 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Tue, 15 Feb 2022 11:33:45 -0800 Subject: [PATCH 221/632] Add a DESCRIPTION to ApplyTypeComments (#647) --- libcst/codemod/commands/convert_type_comments.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 5af00273..bd7d902f 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -394,10 +394,20 @@ class FunctionTypeInfo: class ConvertTypeComments(VisitorBasedCodemodCommand): - """ + DESCRIPTION = """ Codemod that converts type comments into Python 3.6+ style annotations. + Notes: + - This transform requires using the `ast` module, which is not compatible + with multiprocessing. So you should run using a recent version of python, + and set `--jobs=1` if using `python -m libcst.tool codemod ...` from the + commandline. + - This transform requires capabilities from `ast` that are not available + prior to Python 3.9, so libcst must run on Python 3.9+. The code you are + transforming can by Python 3.6+, this limitation applies only to libcst + itself. 
+ We can handle type comments in the following statement types: - Assign - This is converted into a single AnnAssign when possible From bb67d7a6cdb5f747488679ae814e4443fdad9e7f Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 16 Feb 2022 09:57:02 -0800 Subject: [PATCH 222/632] Add a check for copyright headers to lint stage (#648) --- .github/workflows/build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 007d64ec..bb490da6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -65,6 +65,7 @@ jobs: - run: ufmt check . - run: python3 -m fixit.cli.run_rules - run: python -m slotscheck libcst + - run: ./check_copyright.sh # Run pyre typechecker typecheck: From f2cd39c2b9efe9f0a17ef3853d6096dff94e6ad5 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Wed, 16 Feb 2022 10:17:28 -0800 Subject: [PATCH 223/632] Fix the copyright heading on add_trailing_commas.py (#649) --- libcst/codemod/commands/add_trailing_commas.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/libcst/codemod/commands/add_trailing_commas.py b/libcst/codemod/commands/add_trailing_commas.py index 45cbad7e..2f33a4bd 100644 --- a/libcst/codemod/commands/add_trailing_commas.py +++ b/libcst/codemod/commands/add_trailing_commas.py @@ -1,3 +1,5 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. From bb6d150acd59a26a49deacf166aaeb0f0e0d309d Mon Sep 17 00:00:00 2001 From: Stanislav Levin Date: Fri, 18 Feb 2022 20:10:56 +0300 Subject: [PATCH 224/632] tests: Fix assumption about sorting in test_ordering (#650) The test wrongly assumed that `first_assignment.references` is ordered collection, while actually it is `set`. 
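In sketch form, the order-independent pattern the test moves to looks like this (mirroring the updated assertions in the diff below; `first_assignment`, `x`, and `class_b` are the nodes already used by the test):

```python
# first_assignment.references is a set, so its iteration order is arbitrary.
# Collect the referenced nodes once and assert membership instead of indexing.
global_refs_nodes = {access.node for access in first_assignment.references}
assert x.value in global_refs_nodes
assert class_b.value in global_refs_nodes
```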
Fixes: https://github.com/Instagram/LibCST/issues/442 Signed-off-by: Stanislav Levin --- libcst/metadata/tests/test_scope_provider.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 4e65de62..ded9ab1e 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1531,19 +1531,20 @@ class ScopeProviderTest(UnitTest): first_assignment = list(global_scope.assignments)[0] assert isinstance(first_assignment, cst.metadata.Assignment) self.assertEqual(first_assignment.node, import_stmt) - global_refs = list(first_assignment.references) + global_refs = first_assignment.references self.assertEqual(len(global_refs), 2) + global_refs_nodes = {x.node for x in global_refs} class_def = ensure_type(m.body[1], cst.ClassDef) x = ensure_type( ensure_type(class_def.body.body[0], cst.SimpleStatementLine).body[0], cst.Assign, ) - self.assertEqual(x.value, global_refs[0].node) + self.assertIn(x.value, global_refs_nodes) class_b = ensure_type( ensure_type(class_def.body.body[1], cst.SimpleStatementLine).body[0], cst.Assign, ) - self.assertEqual(class_b.value, global_refs[1].node) + self.assertIn(class_b.value, global_refs_nodes) class_accesses = list(scopes[x].accesses) self.assertEqual(len(class_accesses), 3) From e8c84572e4257ca2d2cc8fb43b192484fb768f32 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 18 Feb 2022 09:56:54 -0800 Subject: [PATCH 225/632] Port pyre fixes (#651) * Port https://github.com/facebook/pyre-check/commit/c3b44cb9d3d911335d9482186da56c0478437177 * Port https://github.com/facebook/pyre-check/commit/138c97cb7021fca5f6dab1b868ec4a0c2c710964 * Test harness for the next commit * Port https://github.com/facebook/pyre-check/commit/2cdc4ba237753e489fdc809339a212b8b54fdacd * Test harness for next commit * Port https://github.com/facebook/pyre-check/commit/71c5da81699062e06f93dcfe71e0208c074c04e9 * Remove no-longer-used import --- .../visitors/_apply_type_annotations.py | 44 ++++++++--------- .../tests/test_apply_type_annotations.py | 48 +++++++++++++++++++ 2 files changed, 67 insertions(+), 25 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 471e0314..cc610528 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -14,7 +14,6 @@ from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor -from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_full_name_for_node from libcst.metadata import PositionProvider, QualifiedNameProvider @@ -312,7 +311,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): ) -> None: # pyre-ignore current_assign is never None here name = get_full_name_for_node(self.current_assign.targets[0].target) - if name: + if name is not None: # pyre-ignore current_assign is never None here self.annotations.typevars[name] = self.current_assign self._handle_qualification_and_should_qualify("typing.TypeVar") @@ -530,10 +529,12 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): This is one of the transforms that is available automatically to you when running a codemod. 
To use it in this manner, import - :class:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor` and then call the static - :meth:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor.store_stub_in_context` method, - giving it the current context (found as ``self.context`` for all subclasses of - :class:`~libcst.codemod.Codemod`), the stub module from which you wish to add annotations. + :class:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor` and then call + the static + :meth:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor.store_stub_in_context` + method, giving it the current context (found as ``self.context`` for all + subclasses of :class:`~libcst.codemod.Codemod`), the stub module from which + you wish to add annotations. For example, you can store the type annotation ``int`` for ``x`` using:: @@ -550,7 +551,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): x: int = 1 - If the function or attribute already has a type annotation, it will not be overwritten. + If the function or attribute already has a type annotation, it will not be + overwritten. To overwrite existing annotations when applying annotations from a stub, use the keyword argument ``overwrite_existing_annotations=True`` when @@ -662,20 +664,12 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): cst.MetadataWrapper(stub).visit(visitor) self.annotations.update(visitor.annotations) - tree_with_imports = AddImportsVisitor( - context=self.context, - imports=( - [ - ImportItem( - "__future__", - "annotations", - None, - ) - ] - if self.use_future_annotations - else () - ), - ).transform_module(tree) + if self.use_future_annotations: + AddImportsVisitor.add_needed_import( + self.context, "__future__", "annotations" + ) + tree_with_imports = AddImportsVisitor(self.context).transform_module(tree) + tree_with_changes = tree_with_imports.visit(self) # don't modify the imports if we didn't actually add any type information @@ -735,7 +729,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): for element in only_target.elements: value = element.value name = get_full_name_for_node(value) - if name: + if name is not None and name != "_": self._add_to_toplevel_annotations(name) elif isinstance(only_target, (cst.Subscript)): pass @@ -828,7 +822,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): annotated_parameters.append(parameter) return annotated_parameters - return annotations.parameters.with_changes( + return updated_node.params.with_changes( params=update_annotation( updated_node.params.params, annotations.parameters.params, @@ -1024,7 +1018,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) -> None: # pyre-ignore current_assign is never None here name = get_full_name_for_node(self.current_assign.targets[0].target) - if name: + if name is not None: # Preserve the whole node, even though we currently just use the # name, so that we can match bounds and variance at some point and # determine if two typevars with the same name are indeed the same. @@ -1046,7 +1040,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): target = assign.target if isinstance(target, (cst.Name, cst.Attribute)): name = get_full_name_for_node(target) - if name is not None: + if name is not None and name != "_": # Add separate top-level annotations for `a = b = 1` # as `a: int` and `b: int`. 
self._add_to_toplevel_annotations(name) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 03e5ffbd..c7e3695d 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -154,6 +154,29 @@ class TestApplyAnnotationsVisitor(CodemodTest): x2: Optional[T2] = None """, ), + "splitting_multi_assigns": ( + """ + a: str = ... + x: int = ... + y: int = ... + _: str = ... + z: str = ... + """, + """ + a = 'a' + x, y = 1, 2 + _, z = 'hello world'.split() + """, + """ + x: int + y: int + z: str + + a: str = 'a' + x, y = 1, 2 + _, z = 'hello world'.split() + """, + ), } ) def test_annotate_globals(self, stub: str, before: str, after: str) -> None: @@ -468,6 +491,31 @@ class TestApplyAnnotationsVisitor(CodemodTest): return respond(r, b) """, ), + "with_variadic_arguments": ( + """ + def incomplete_stubs_with_stars( + x: int, + *args, + **kwargs, + ) -> None: ... + """, + """ + def incomplete_stubs_with_stars( + x, + *args: P.args, + **kwargs: P.kwargs, + ): + pass + """, + """ + def incomplete_stubs_with_stars( + x: int, + *args: P.args, + **kwargs: P.kwargs, + ) -> None: + pass + """, + ), # test cases named with the REQUIRES_PREEXISTING prefix are verifying # that certain special cases work if the stub and the existing code # happen to align well, but none of these cases are guaranteed to work From 775beec38fb301ea1c2d8259b1e8a3360910aa6e Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 18 Feb 2022 10:52:08 -0800 Subject: [PATCH 226/632] Remove trailing comment line from LICENSE header (#652) --- libcst/codemod/visitors/_apply_type_annotations.py | 1 - 1 file changed, 1 deletion(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index cc610528..2b56d9fa 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -2,7 +2,6 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree -# from dataclasses import dataclass from typing import Dict, List, Optional, Sequence, Set, Tuple, Union From f7417febe71c1fa8581b40dbf456d7d7d6025e62 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Fri, 18 Feb 2022 12:10:44 -0800 Subject: [PATCH 227/632] Tweak the license format again. (#653) --- libcst/codemod/visitors/_apply_type_annotations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 2b56d9fa..b439da82 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -1,7 +1,7 @@ # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree +# LICENSE file in the root directory of this source tree. 
from dataclasses import dataclass from typing import Dict, List, Optional, Sequence, Set, Tuple, Union From 17137014ac38b066f3b6e86f5eb36e20e2efbb58 Mon Sep 17 00:00:00 2001 From: Dmitry Vinnik Date: Fri, 4 Mar 2022 11:04:43 -0800 Subject: [PATCH 228/632] docs: add social button in support of Ukraine (#655) ## Summary Our mission at Meta Open Source is to empower communities through open source, and we believe that it means building a welcoming and safe environment for all. As a part of this work, we are adding this banner in support for Ukraine during this crisis. --- README.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 94616832..f5a09998 100644 --- a/README.rst +++ b/README.rst @@ -4,8 +4,12 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python -|readthedocs-badge| |ci-badge| |codecov-badge| |pypi-badge| |pypi-download| |notebook-badge| +|support-ukraine| |readthedocs-badge| |ci-badge| |codecov-badge| |pypi-badge| |pypi-download| |notebook-badge| +.. |support-ukraine| image:: https://img.shields.io/badge/Support-Ukraine-FFD500?style=flat&labelColor=005BBB + :alt: Support Ukraine - Help Provide Humanitarian Aid to Ukraine. + :target: https://opensource.fb.com/support-ukraine + .. |readthedocs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest&style=flat :target: https://libcst.readthedocs.io/en/latest/ :alt: Documentation From f863febc4de0d3206f539a64362fd939d11b6cf7 Mon Sep 17 00:00:00 2001 From: Shannon Zhu Date: Wed, 9 Mar 2022 10:53:44 -0800 Subject: [PATCH 229/632] Define gather global names visitor (#657) * Define gather global names visitor * Quote forward references when applying annotations --- libcst/codemod/visitors/__init__.py | 2 + .../visitors/_apply_type_annotations.py | 44 ++++++- .../codemod/visitors/_gather_global_names.py | 75 +++++++++++ .../tests/test_apply_type_annotations.py | 118 ++++++++++++++++++ .../tests/test_gather_global_names.py | 54 ++++++++ 5 files changed, 289 insertions(+), 4 deletions(-) create mode 100644 libcst/codemod/visitors/_gather_global_names.py create mode 100644 libcst/codemod/visitors/tests/test_gather_global_names.py diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index 1cbbd2c8..632d6fa6 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -7,6 +7,7 @@ from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor from libcst.codemod.visitors._gather_comments import GatherCommentsVisitor from libcst.codemod.visitors._gather_exports import GatherExportsVisitor +from libcst.codemod.visitors._gather_global_names import GatherGlobalNamesVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.codemod.visitors._gather_string_annotation_names import ( GatherNamesFromStringAnnotationsVisitor, @@ -20,6 +21,7 @@ __all__ = [ "ApplyTypeAnnotationsVisitor", "GatherCommentsVisitor", "GatherExportsVisitor", + "GatherGlobalNamesVisitor", "GatherImportsVisitor", "GatherNamesFromStringAnnotationsVisitor", "GatherUnusedImportsVisitor", diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index b439da82..fe74b391 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -12,6 +12,7 @@ import libcst.matchers as m from 
libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor +from libcst.codemod.visitors._gather_global_names import GatherGlobalNamesVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.helpers import get_full_name_for_node from libcst.metadata import PositionProvider, QualifiedNameProvider @@ -595,6 +596,11 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.current_assign: Optional[cst.Assign] = None self.typevars: Dict[str, cst.Assign] = {} + # Global variables and classes defined on the toplevel of the target module. + # Used to help determine which names we need to check are in scope, and add + # quotations to avoid undefined forward references in type annotations. + self.global_names: Set[str] = set() + @staticmethod def store_stub_in_context( context: CodemodContext, @@ -631,11 +637,19 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): Collect type annotations from all stubs and apply them to ``tree``. Gather existing imports from ``tree`` so that we don't add duplicate imports. + + Gather global names from ``tree`` so forward references are quoted. """ import_gatherer = GatherImportsVisitor(CodemodContext()) tree.visit(import_gatherer) existing_import_names = _get_imported_names(import_gatherer.all_imports) + global_names_gatherer = GatherGlobalNamesVisitor(CodemodContext()) + tree.visit(global_names_gatherer) + self.global_names = global_names_gatherer.global_names.union( + global_names_gatherer.class_names + ) + context_contents = self.context.scratch.get( ApplyTypeAnnotationsVisitor.CONTEXT_KEY ) @@ -677,6 +691,26 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): else: return tree + # helpers for processing annotation nodes + def _quote_future_annotations(self, annotation: cst.Annotation) -> cst.Annotation: + # TODO: We probably want to make sure references to classes defined in the current + # module come to us fully qualified - so we can do the dequalification here and + # know to look for what is in-scope without also catching builtins like "None" in the + # quoting. This should probably also be extended to handle what imports are in scope, + # as well as subscriptable types. + # Note: We are collecting all imports and passing this to the type collector grabbing + # annotations from the stub file; should consolidate import handling somewhere too. 
+ node = annotation.annotation + if ( + isinstance(node, cst.Name) + and (node.value in self.global_names) + and not (node.value in self.visited_classes) + ): + return annotation.with_changes( + annotation=cst.SimpleString(value=f'"{node.value}"') + ) + return annotation + # smart constructors: all applied annotations happen via one of these def _apply_annotation_to_attribute_or_global( @@ -691,7 +725,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.annotation_counts.attribute_annotations += 1 return cst.AnnAssign( cst.Name(name), - annotation, + self._quote_future_annotations(annotation), value, ) @@ -702,7 +736,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) -> cst.Param: self.annotation_counts.parameter_annotations += 1 return parameter.with_changes( - annotation=annotation, + annotation=self._quote_future_annotations(annotation), ) def _apply_annotation_to_return( @@ -711,7 +745,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): annotation: cst.Annotation, ) -> cst.FunctionDef: self.annotation_counts.return_annotations += 1 - return function_def.with_changes(returns=annotation) + return function_def.with_changes( + returns=self._quote_future_annotations(annotation), + ) # private methods used in the visit and leave methods @@ -948,13 +984,13 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): node: cst.ClassDef, ) -> None: self.qualifier.append(node.name.value) - self.visited_classes.add(node.name.value) def leave_ClassDef( self, original_node: cst.ClassDef, updated_node: cst.ClassDef, ) -> cst.ClassDef: + self.visited_classes.add(original_node.name.value) cls_name = ".".join(self.qualifier) self.qualifier.pop() definition = self.annotations.class_definitions.get(cls_name) diff --git a/libcst/codemod/visitors/_gather_global_names.py b/libcst/codemod/visitors/_gather_global_names.py new file mode 100644 index 00000000..c4a5d57d --- /dev/null +++ b/libcst/codemod/visitors/_gather_global_names.py @@ -0,0 +1,75 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Set + +import libcst +from libcst.codemod._context import CodemodContext +from libcst.codemod._visitor import ContextAwareVisitor + + +class GatherGlobalNamesVisitor(ContextAwareVisitor): + """ + Gathers all globally accessible names defined in a module and stores them as + attributes on the instance. + Intended to be instantiated and passed to a :class:`~libcst.Module` + :meth:`~libcst.CSTNode.visit` method in order to gather up information about + names defined on a module. Note that this is not a substitute for scope + analysis or qualified name support. Please see :ref:`libcst-scope-tutorial` + for a more robust way of determining the qualified name and definition for + an arbitrary node. + Names that are globally accessible through imports are currently not included + but can be retrieved with GatherImportsVisitor. + + After visiting a module the following attributes will be populated: + + global_names + A sequence of strings representing global variables defined in the module + toplevel. + class_names + A sequence of strings representing classes defined in the module toplevel. + function_names + A sequence of strings representing functions defined in the module toplevel. 
+ + """ + + def __init__(self, context: CodemodContext) -> None: + super().__init__(context) + self.global_names: Set[str] = set() + self.class_names: Set[str] = set() + self.function_names: Set[str] = set() + # Track scope nesting + self.scope_depth: int = 0 + + def visit_ClassDef(self, node: libcst.ClassDef) -> None: + if self.scope_depth == 0: + self.class_names.add(node.name.value) + self.scope_depth += 1 + + def leave_ClassDef(self, original_node: libcst.ClassDef) -> None: + self.scope_depth -= 1 + + def visit_FunctionDef(self, node: libcst.FunctionDef) -> None: + if self.scope_depth == 0: + self.function_names.add(node.name.value) + self.scope_depth += 1 + + def leave_FunctionDef(self, original_node: libcst.FunctionDef) -> None: + self.scope_depth -= 1 + + def visit_Assign(self, node: libcst.Assign) -> None: + if self.scope_depth != 0: + return + for assign_target in node.targets: + target = assign_target.target + if isinstance(target, libcst.Name): + self.global_names.add(target.value) + + def visit_AnnAssign(self, node: libcst.AnnAssign) -> None: + if self.scope_depth != 0: + return + target = node.target + if isinstance(target, libcst.Name): + self.global_names.add(target.value) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index c7e3695d..9a3e59d7 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1058,6 +1058,124 @@ class TestApplyAnnotationsVisitor(CodemodTest): overwrite_existing_annotations=True, ) + @data_provider( + { + "return_self": ( + """ + class Foo: + def f(self) -> Foo: ... + """, + """ + class Foo: + def f(self): + return self + """, + """ + class Foo: + def f(self) -> "Foo": + return self + """, + ), + "return_forward_reference": ( + """ + class Foo: + def f(self) -> Bar: ... + + class Bar: + ... + """, + """ + class Foo: + def f(self): + return Bar() + + class Bar: + pass + """, + """ + class Foo: + def f(self) -> "Bar": + return Bar() + + class Bar: + pass + """, + ), + "return_backward_reference": ( + """ + class Bar: + ... + + class Foo: + def f(self) -> Bar: ... + """, + """ + class Bar: + pass + + class Foo: + def f(self): + return Bar() + """, + """ + class Bar: + pass + + class Foo: + def f(self) -> Bar: + return Bar() + """, + ), + "return_undefined_name": ( + """ + class Foo: + def f(self) -> Bar: ... + """, + """ + class Foo: + def f(self): + return self + """, + """ + class Foo: + def f(self) -> Bar: + return self + """, + ), + "parameter_forward_reference": ( + """ + def f(input: Bar) -> None: ... + + class Bar: + ... + """, + """ + def f(input): + pass + + class Bar: + pass + """, + """ + def f(input: "Bar") -> None: + pass + + class Bar: + pass + """, + ), + } + ) + def test_annotate_with_forward_references( + self, stub: str, before: str, after: str + ) -> None: + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, + overwrite_existing_annotations=True, + ) + @data_provider( { "fully_annotated_with_untyped_stub": ( diff --git a/libcst/codemod/visitors/tests/test_gather_global_names.py b/libcst/codemod/visitors/tests/test_gather_global_names.py new file mode 100644 index 00000000..8a7a7b8b --- /dev/null +++ b/libcst/codemod/visitors/tests/test_gather_global_names.py @@ -0,0 +1,54 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from libcst import parse_module +from libcst.codemod import CodemodContext, CodemodTest +from libcst.codemod.visitors import GatherGlobalNamesVisitor +from libcst.testing.utils import UnitTest + + +class TestGatherGlobalNamesVisitor(UnitTest): + def gather_global_names(self, code: str) -> GatherGlobalNamesVisitor: + transform_instance = GatherGlobalNamesVisitor( + CodemodContext(full_module_name="a.b.foobar") + ) + input_tree = parse_module(CodemodTest.make_fixture_data(code)) + input_tree.visit(transform_instance) + return transform_instance + + def test_gather_nothing(self) -> None: + code = """ + from a import b + b() + """ + gatherer = self.gather_global_names(code) + self.assertEqual(gatherer.global_names, set()) + self.assertEqual(gatherer.class_names, set()) + self.assertEqual(gatherer.function_names, set()) + + def test_globals(self) -> None: + code = """ + x = 1 + y = 2 + def foo(): pass + class Foo: pass + """ + gatherer = self.gather_global_names(code) + self.assertEqual(gatherer.global_names, {"x", "y"}) + self.assertEqual(gatherer.class_names, {"Foo"}) + self.assertEqual(gatherer.function_names, {"foo"}) + + def test_omit_nested(self) -> None: + code = """ + def foo(): + x = 1 + + class Foo: + def method(self): pass + """ + gatherer = self.gather_global_names(code) + self.assertEqual(gatherer.global_names, set()) + self.assertEqual(gatherer.class_names, {"Foo"}) + self.assertEqual(gatherer.function_names, {"foo"}) From 914b18339d0cce12d4de8db7593ea9d1819c363a Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 23 Mar 2022 14:17:25 -0400 Subject: [PATCH 230/632] Support module and package names in the codemod context (#662) * Support module and package names in the codemod context * PR feedback * Reorganize module name and relative name logic to libcst.helpers.module * Force rebuild --- libcst/codemod/_cli.py | 39 ++---- libcst/codemod/_context.py | 6 + libcst/codemod/tests/test_cli.py | 66 ---------- libcst/helpers/__init__.py | 16 ++- libcst/helpers/_statement.py | 58 --------- libcst/helpers/module.py | 97 ++++++++++++++- libcst/helpers/tests/test_module.py | 164 +++++++++++++++++++++++-- libcst/helpers/tests/test_statement.py | 87 ------------- 8 files changed, 276 insertions(+), 257 deletions(-) delete mode 100644 libcst/codemod/tests/test_cli.py delete mode 100644 libcst/helpers/_statement.py delete mode 100644 libcst/helpers/tests/test_statement.py diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index a7b18783..4726a34f 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -16,7 +16,7 @@ import time import traceback from dataclasses import dataclass, replace from multiprocessing import cpu_count, Pool -from pathlib import Path, PurePath +from pathlib import Path from typing import Any, AnyStr, cast, Dict, List, Optional, Sequence, Union from libcst import parse_module, PartialParserConfig @@ -32,6 +32,7 @@ from libcst.codemod._runner import ( TransformSkip, TransformSuccess, ) +from libcst.helpers import calculate_module_and_package from libcst.metadata import FullRepoManager _DEFAULT_GENERATED_CODE_MARKER: str = f"@gen{''}erated" @@ -184,30 +185,6 @@ def exec_transform_with_prettyprint( return maybe_code -def _calculate_module(repo_root: Optional[str], filename: str) -> Optional[str]: - # Given an absolute repo_root and an absolute filename, calculate the - # python module name for the file. 
- if repo_root is None: - # We don't have a repo root, so this is impossible to calculate. - return None - - try: - relative_filename = PurePath(filename).relative_to(repo_root) - except ValueError: - # This file seems to be out of the repo root. - return None - - # get rid of extension - relative_filename = relative_filename.with_suffix("") - - # get rid of any special cases - if relative_filename.stem in ["__init__", "__main__"]: - relative_filename = relative_filename.parent - - # Now, convert to dots to represent the python module. - return ".".join(relative_filename.parts) - - @dataclass(frozen=True) class ExecutionResult: # File we have results for @@ -271,10 +248,20 @@ def _execute_transform( # noqa: C901 transformer.context = replace( transformer.context, filename=filename, - full_module_name=_calculate_module(config.repo_root, filename), scratch={}, ) + # attempt to work out the module and package name for this file + module_name_and_package = calculate_module_and_package( + config.repo_root, filename + ) + if module_name_and_package is not None: + transformer.context = replace( + transformer.context, + full_module_name=module_name_and_package.name, + full_package_name=module_name_and_package.package, + ) + # Run the transform, bail if we failed or if we aren't formatting code try: input_tree = parse_module( diff --git a/libcst/codemod/_context.py b/libcst/codemod/_context.py index 04154859..47373df4 100644 --- a/libcst/codemod/_context.py +++ b/libcst/codemod/_context.py @@ -44,6 +44,12 @@ class CodemodContext: #: in the repo named ``foo/bar/baz.py``. full_module_name: Optional[str] = None + #: The current package if a codemod is being executed against a file that + #: lives on disk, and the repository root is correctly configured. This + #: Will take the form of a dotted name such as ``foo.bar`` for a file + #: in the repo named ``foo/bar/baz.py`` + full_package_name: Optional[str] = None + #: The current top level metadata wrapper for the module being modified. #: To access computed metadata when inside an actively running codemod, use #: the :meth:`~libcst.MetadataDependent.get_metadata` method on diff --git a/libcst/codemod/tests/test_cli.py b/libcst/codemod/tests/test_cli.py deleted file mode 100644 index 9c1834e5..00000000 --- a/libcst/codemod/tests/test_cli.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# -from typing import Optional - -from libcst.codemod._cli import _calculate_module -from libcst.testing.utils import data_provider, UnitTest - - -class TestPackageCalculation(UnitTest): - @data_provider( - ( - # Providing no root should give back no module. - (None, "/some/dummy/file.py", None), - # Providing a file outside the root should give back no module. - ("/home/username/root", "/some/dummy/file.py", None), - ("/home/username/root/", "/some/dummy/file.py", None), - ("/home/username/root", "/home/username/file.py", None), - # Various files inside the root should give back valid modules. - ("/home/username/root", "/home/username/root/file.py", "file"), - ("/home/username/root/", "/home/username/root/file.py", "file"), - ( - "/home/username/root/", - "/home/username/root/some/dir/file.py", - "some.dir.file", - ), - # Various special files inside the root should give back valid modules. 
- ( - "/home/username/root/", - "/home/username/root/some/dir/__init__.py", - "some.dir", - ), - ( - "/home/username/root/", - "/home/username/root/some/dir/__main__.py", - "some.dir", - ), - # some windows tests - ( - "c:/Program Files/", - "d:/Program Files/some/dir/file.py", - None, - ), - ( - "c:/Program Files/other/", - "c:/Program Files/some/dir/file.py", - None, - ), - ( - "c:/Program Files/", - "c:/Program Files/some/dir/file.py", - "some.dir.file", - ), - ( - "c:/Program Files/", - "c:/Program Files/some/dir/__main__.py", - "some.dir", - ), - ), - ) - def test_calculate_module( - self, repo_root: Optional[str], filename: str, module: str - ) -> None: - self.assertEqual(_calculate_module(repo_root, filename), module) diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index ccd12c72..6f0db041 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -4,11 +4,6 @@ # LICENSE file in the root directory of this source tree. # -from libcst.helpers._statement import ( - get_absolute_module, - get_absolute_module_for_import, - get_absolute_module_for_import_or_raise, -) from libcst.helpers._template import ( parse_template_expression, parse_template_module, @@ -19,9 +14,17 @@ from libcst.helpers.expression import ( get_full_name_for_node, get_full_name_for_node_or_raise, ) -from libcst.helpers.module import insert_header_comments +from libcst.helpers.module import ( + calculate_module_and_package, + get_absolute_module, + get_absolute_module_for_import, + get_absolute_module_for_import_or_raise, + insert_header_comments, + ModuleNameAndPackage, +) __all__ = [ + "calculate_module_and_package", "get_absolute_module", "get_absolute_module_for_import", "get_absolute_module_for_import_or_raise", @@ -32,4 +35,5 @@ __all__ = [ "parse_template_module", "parse_template_statement", "parse_template_expression", + "ModuleNameAndPackage", ] diff --git a/libcst/helpers/_statement.py b/libcst/helpers/_statement.py deleted file mode 100644 index f62a5eb8..00000000 --- a/libcst/helpers/_statement.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# -from typing import Optional - -import libcst as cst -from libcst.helpers.expression import get_full_name_for_node - - -def get_absolute_module( - current_module: Optional[str], module_name: Optional[str], num_dots: int -) -> Optional[str]: - if num_dots == 0: - # This is an absolute import, so the module is correct. - return module_name - if current_module is None: - # We don't actually have the current module available, so we can't compute - # the absolute module from relative. - return None - # We have the current module, as well as the relative, let's compute the base. - modules = current_module.split(".") - if len(modules) < num_dots: - # This relative import goes past the base of the repository, so we can't calculate it. - return None - base_module = ".".join(modules[:-num_dots]) - # Finally, if the module name was supplied, append it to the end. - if module_name is not None: - # If we went all the way to the top, the base module should be empty, so we - # should return the relative bit as absolute. Otherwise, combine the base - # module and module name using a dot separator. - base_module = ( - f"{base_module}.{module_name}" if len(base_module) > 0 else module_name - ) - # If they tried to import all the way to the root, return None. 
Otherwise, - # return the module itself. - return base_module if len(base_module) > 0 else None - - -def get_absolute_module_for_import( - current_module: Optional[str], import_node: cst.ImportFrom -) -> Optional[str]: - # First, let's try to grab the module name, regardless of relative status. - module = import_node.module - module_name = get_full_name_for_node(module) if module is not None else None - # Now, get the relative import location if it exists. - num_dots = len(import_node.relative) - return get_absolute_module(current_module, module_name, num_dots) - - -def get_absolute_module_for_import_or_raise( - current_module: Optional[str], import_node: cst.ImportFrom -) -> str: - module = get_absolute_module_for_import(current_module, import_node) - if module is None: - raise Exception(f"Unable to compute absolute module for {import_node}") - return module diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 50e42ff7..f9ba41aa 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -3,13 +3,16 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # +from dataclasses import dataclass from itertools import islice -from typing import List +from pathlib import PurePath +from typing import List, Optional -import libcst +from libcst import Comment, EmptyLine, ImportFrom, Module +from libcst.helpers.expression import get_full_name_for_node -def insert_header_comments(node: libcst.Module, comments: List[str]) -> libcst.Module: +def insert_header_comments(node: Module, comments: List[str]) -> Module: """ Insert comments after last non-empty line in header. Use this to insert one or more comments after any copyright preamble in a :class:`~libcst.Module`. Each comment in @@ -25,9 +28,91 @@ def insert_header_comments(node: libcst.Module, comments: List[str]) -> libcst.M comment_lines = islice(node.header, last_comment_index + 1) empty_lines = islice(node.header, last_comment_index + 1, None) - inserted_lines = [ - libcst.EmptyLine(comment=libcst.Comment(value=comment)) for comment in comments - ] + inserted_lines = [EmptyLine(comment=Comment(value=comment)) for comment in comments] # pyre-fixme[60]: Concatenation not yet support for multiple variadic tuples: # `*comment_lines, *inserted_lines, *empty_lines`. return node.with_changes(header=(*comment_lines, *inserted_lines, *empty_lines)) + + +def get_absolute_module( + current_module: Optional[str], module_name: Optional[str], num_dots: int +) -> Optional[str]: + if num_dots == 0: + # This is an absolute import, so the module is correct. + return module_name + if current_module is None: + # We don't actually have the current module available, so we can't compute + # the absolute module from relative. + return None + # We have the current module, as well as the relative, let's compute the base. + modules = current_module.split(".") + if len(modules) < num_dots: + # This relative import goes past the base of the repository, so we can't calculate it. + return None + base_module = ".".join(modules[:-num_dots]) + # Finally, if the module name was supplied, append it to the end. + if module_name is not None: + # If we went all the way to the top, the base module should be empty, so we + # should return the relative bit as absolute. Otherwise, combine the base + # module and module name using a dot separator. 
+ base_module = ( + f"{base_module}.{module_name}" if len(base_module) > 0 else module_name + ) + # If they tried to import all the way to the root, return None. Otherwise, + # return the module itself. + return base_module if len(base_module) > 0 else None + + +def get_absolute_module_for_import( + current_module: Optional[str], import_node: ImportFrom +) -> Optional[str]: + # First, let's try to grab the module name, regardless of relative status. + module = import_node.module + module_name = get_full_name_for_node(module) if module is not None else None + # Now, get the relative import location if it exists. + num_dots = len(import_node.relative) + return get_absolute_module(current_module, module_name, num_dots) + + +def get_absolute_module_for_import_or_raise( + current_module: Optional[str], import_node: ImportFrom +) -> str: + module = get_absolute_module_for_import(current_module, import_node) + if module is None: + raise Exception(f"Unable to compute absolute module for {import_node}") + return module + + +@dataclass(frozen=True) +class ModuleNameAndPackage: + name: str + package: str + + +def calculate_module_and_package( + repo_root: Optional[str], filename: str +) -> Optional[ModuleNameAndPackage]: + # Given an absolute repo_root and an absolute filename, calculate the + # python module name for the file. + if repo_root is None: + # We don't have a repo root, so this is impossible to calculate. + return None + + try: + relative_filename = PurePath(filename).relative_to(repo_root) + except ValueError: + # This file seems to be out of the repo root. + return None + + # get rid of extension + relative_filename = relative_filename.with_suffix("") + + # handle special cases + if relative_filename.stem in ["__init__", "__main__"]: + relative_filename = relative_filename.parent + package = name = ".".join(relative_filename.parts) + else: + name = ".".join(relative_filename.parts) + package = ".".join(relative_filename.parts[:-1]) + + return ModuleNameAndPackage(name, package) diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index 687e0260..da9dab71 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -3,9 +3,18 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
# -import libcst -from libcst.helpers import insert_header_comments -from libcst.testing.utils import UnitTest +from typing import Optional + +import libcst as cst +from libcst.helpers.common import ensure_type +from libcst.helpers.module import ( + calculate_module_and_package, + get_absolute_module_for_import, + get_absolute_module_for_import_or_raise, + insert_header_comments, + ModuleNameAndPackage, +) +from libcst.testing.utils import data_provider, UnitTest class ModuleTest(UnitTest): @@ -18,7 +27,7 @@ class ModuleTest(UnitTest): expected_code = "\n".join( comment_lines + inserted_comments + empty_lines + non_header_line ) - node = libcst.parse_module(original_code) + node = cst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -26,7 +35,7 @@ class ModuleTest(UnitTest): # No comment case original_code = "\n".join(empty_lines + non_header_line) expected_code = "\n".join(inserted_comments + empty_lines + non_header_line) - node = libcst.parse_module(original_code) + node = cst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -34,7 +43,7 @@ class ModuleTest(UnitTest): # No empty lines case original_code = "\n".join(comment_lines + non_header_line) expected_code = "\n".join(comment_lines + inserted_comments + non_header_line) - node = libcst.parse_module(original_code) + node = cst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -45,7 +54,7 @@ class ModuleTest(UnitTest): expected_code = "\n".join( comment_lines + inserted_comments + empty_lines + non_header_line ) - node = libcst.parse_module(original_code) + node = cst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -53,7 +62,146 @@ class ModuleTest(UnitTest): # No header case original_code = "\n".join(non_header_line) expected_code = "\n".join(inserted_comments + non_header_line) - node = libcst.parse_module(original_code) + node = cst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) + + @data_provider( + ( + # Simple imports that are already absolute. + (None, "from a.b import c", "a.b"), + ("x.y.z", "from a.b import c", "a.b"), + # Relative import that can't be resolved due to missing module. + (None, "from ..w import c", None), + # Relative import that goes past the module level. + ("x", "from ...y import z", None), + ("x.y.z", "from .....w import c", None), + ("x.y.z", "from ... import c", None), + # Correct resolution of absolute from relative modules. + ("x.y.z", "from . import c", "x.y"), + ("x.y.z", "from .. import c", "x"), + ("x.y.z", "from .w import c", "x.y.w"), + ("x.y.z", "from ..w import c", "x.w"), + ("x.y.z", "from ...w import c", "w"), + ) + ) + def test_get_absolute_module( + self, + module: Optional[str], + importfrom: str, + output: Optional[str], + ) -> None: + node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) + assert len(node.body) == 1, "Unexpected number of statements!" 
+ import_node = ensure_type(node.body[0], cst.ImportFrom) + + self.assertEqual(get_absolute_module_for_import(module, import_node), output) + if output is None: + with self.assertRaises(Exception): + get_absolute_module_for_import_or_raise(module, import_node) + else: + self.assertEqual( + get_absolute_module_for_import_or_raise(module, import_node), output + ) + + @data_provider( + ( + # Nodes without an asname + (cst.ImportAlias(name=cst.Name("foo")), "foo", None), + ( + cst.ImportAlias(name=cst.Attribute(cst.Name("foo"), cst.Name("bar"))), + "foo.bar", + None, + ), + # Nodes with an asname + ( + cst.ImportAlias( + name=cst.Name("foo"), asname=cst.AsName(name=cst.Name("baz")) + ), + "foo", + "baz", + ), + ( + cst.ImportAlias( + name=cst.Attribute(cst.Name("foo"), cst.Name("bar")), + asname=cst.AsName(name=cst.Name("baz")), + ), + "foo.bar", + "baz", + ), + ) + ) + def test_importalias_helpers( + self, alias_node: cst.ImportAlias, full_name: str, alias: Optional[str] + ) -> None: + self.assertEqual(alias_node.evaluated_name, full_name) + self.assertEqual(alias_node.evaluated_alias, alias) + + @data_provider( + ( + # Providing no root should give back no module. + (None, "/some/dummy/file.py", None), + # Providing a file outside the root should give back no module. + ("/home/username/root", "/some/dummy/file.py", None), + ("/home/username/root/", "/some/dummy/file.py", None), + ("/home/username/root", "/home/username/file.py", None), + # Various files inside the root should give back valid modules. + ( + "/home/username/root", + "/home/username/root/file.py", + ModuleNameAndPackage("file", ""), + ), + ( + "/home/username/root/", + "/home/username/root/file.py", + ModuleNameAndPackage("file", ""), + ), + ( + "/home/username/root/", + "/home/username/root/some/dir/file.py", + ModuleNameAndPackage("some.dir.file", "some.dir"), + ), + # Various special files inside the root should give back valid modules. + ( + "/home/username/root/", + "/home/username/root/some/dir/__init__.py", + ModuleNameAndPackage("some.dir", "some.dir"), + ), + ( + "/home/username/root/", + "/home/username/root/some/dir/__main__.py", + ModuleNameAndPackage("some.dir", "some.dir"), + ), + # some windows tests + ( + "c:/Program Files/", + "d:/Program Files/some/dir/file.py", + None, + ), + ( + "c:/Program Files/other/", + "c:/Program Files/some/dir/file.py", + None, + ), + ( + "c:/Program Files/", + "c:/Program Files/some/dir/file.py", + ModuleNameAndPackage("some.dir.file", "some.dir"), + ), + ( + "c:/Program Files/", + "c:/Program Files/some/dir/__main__.py", + ModuleNameAndPackage("some.dir", "some.dir"), + ), + ), + ) + def test_calculate_module_and_package( + self, + repo_root: Optional[str], + filename: str, + module_and_package: Optional[ModuleNameAndPackage], + ) -> None: + self.assertEqual( + calculate_module_and_package(repo_root, filename), module_and_package + ) diff --git a/libcst/helpers/tests/test_statement.py b/libcst/helpers/tests/test_statement.py deleted file mode 100644 index f26900bd..00000000 --- a/libcst/helpers/tests/test_statement.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
-# -from typing import Optional - -import libcst as cst -from libcst.helpers import ( - ensure_type, - get_absolute_module_for_import, - get_absolute_module_for_import_or_raise, -) -from libcst.testing.utils import data_provider, UnitTest - - -class StatementTest(UnitTest): - @data_provider( - ( - # Simple imports that are already absolute. - (None, "from a.b import c", "a.b"), - ("x.y.z", "from a.b import c", "a.b"), - # Relative import that can't be resolved due to missing module. - (None, "from ..w import c", None), - # Relative import that goes past the module level. - ("x", "from ...y import z", None), - ("x.y.z", "from .....w import c", None), - ("x.y.z", "from ... import c", None), - # Correct resolution of absolute from relative modules. - ("x.y.z", "from . import c", "x.y"), - ("x.y.z", "from .. import c", "x"), - ("x.y.z", "from .w import c", "x.y.w"), - ("x.y.z", "from ..w import c", "x.w"), - ("x.y.z", "from ...w import c", "w"), - ) - ) - def test_get_absolute_module( - self, - module: Optional[str], - importfrom: str, - output: Optional[str], - ) -> None: - node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) - assert len(node.body) == 1, "Unexpected number of statements!" - import_node = ensure_type(node.body[0], cst.ImportFrom) - - self.assertEqual(get_absolute_module_for_import(module, import_node), output) - if output is None: - with self.assertRaises(Exception): - get_absolute_module_for_import_or_raise(module, import_node) - else: - self.assertEqual( - get_absolute_module_for_import_or_raise(module, import_node), output - ) - - @data_provider( - ( - # Nodes without an asname - (cst.ImportAlias(name=cst.Name("foo")), "foo", None), - ( - cst.ImportAlias(name=cst.Attribute(cst.Name("foo"), cst.Name("bar"))), - "foo.bar", - None, - ), - # Nodes with an asname - ( - cst.ImportAlias( - name=cst.Name("foo"), asname=cst.AsName(name=cst.Name("baz")) - ), - "foo", - "baz", - ), - ( - cst.ImportAlias( - name=cst.Attribute(cst.Name("foo"), cst.Name("bar")), - asname=cst.AsName(name=cst.Name("baz")), - ), - "foo.bar", - "baz", - ), - ) - ) - def test_importalias_helpers( - self, alias_node: cst.ImportAlias, full_name: str, alias: Optional[str] - ) -> None: - self.assertEqual(alias_node.evaluated_name, full_name) - self.assertEqual(alias_node.evaluated_alias, alias) From e5ab7b90b4c9cd1f46e5b875ad317411abf48298 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 24 Mar 2022 11:31:48 -0400 Subject: [PATCH 231/632] Drop support for running libcst using a python 3.6 interpreter (#663) * Drop support for running libcst using a python 3.6 interpreter * PR feedback --- .github/workflows/build.yml | 2 +- README.rst | 2 +- libcst/_type_enforce.py | 29 ++++++++++++----------------- pyproject.toml | 2 +- setup.py | 7 ++++--- 5 files changed, 19 insertions(+), 23 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index bb490da6..d4167652 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] + python-version: [3.7, 3.8, 3.9, "3.10"] parser: [pure, native] steps: - uses: actions/checkout@v1 diff --git a/README.rst b/README.rst index f5a09998..710e2223 100644 --- a/README.rst +++ b/README.rst @@ -125,7 +125,7 @@ For a more detailed usage example, `see our documentation Installation ------------ -LibCST requires Python 3.6+ and can be easily installed using most common Python 
+LibCST requires Python 3.7+ and can be easily installed using most common Python packaging tools. We recommend installing the latest stable release from `PyPI `_ with pip: diff --git a/libcst/_type_enforce.py b/libcst/_type_enforce.py index 3eb72e77..b13c41de 100644 --- a/libcst/_type_enforce.py +++ b/libcst/_type_enforce.py @@ -3,17 +3,19 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Tuple +from typing import ( + Any, + ForwardRef, + Iterable, + Mapping, + MutableMapping, + MutableSequence, + Tuple, +) from typing_extensions import Literal from typing_inspect import get_args, get_origin, is_classvar, is_typevar, is_union_type -try: # py37+ - from typing import ForwardRef -except ImportError: # py36 - # pyre-fixme[21]: Could not find name `_ForwardRef` in `typing` (stubbed). - from typing import _ForwardRef as ForwardRef - def is_value_of_type( # noqa: C901 "too complex" # pyre-fixme[2]: Parameter annotation cannot be `Any`. @@ -47,12 +49,8 @@ def is_value_of_type( # noqa: C901 "too complex" - Type[...] """ if is_classvar(expected_type): - # `ClassVar` (no subscript) is implicitly `ClassVar[Any]` - if hasattr(expected_type, "__type__"): # py36 - expected_type = expected_type.__type__ or Any - else: # py37+ - classvar_args = get_args(expected_type) - expected_type = (classvar_args[0] or Any) if classvar_args else Any + classvar_args = get_args(expected_type) + expected_type = (classvar_args[0] or Any) if classvar_args else Any if is_typevar(expected_type): # treat this the same as Any @@ -70,10 +68,7 @@ def is_value_of_type( # noqa: C901 "too complex" ) elif isinstance(expected_origin_type, type(Literal)): - if hasattr(expected_type, "__values__"): # py36 - literal_values = expected_type.__values__ - else: # py37+ - literal_values = get_args(expected_type, evaluate=True) + literal_values = get_args(expected_type, evaluate=True) return any(value == literal for literal in literal_values) elif isinstance(expected_origin_type, ForwardRef): diff --git a/pyproject.toml b/pyproject.toml index 84cfc628..986a2339 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,4 +9,4 @@ excludes = ["native/", "stubs/"] exclude-modules = '^libcst\.(testing|tests)' [build-system] -requires = ["setuptools", "wheel", "setuptools-rust"] \ No newline at end of file +requires = ["setuptools", "wheel", "setuptools-rust"] diff --git a/setup.py b/setup.py index 0418cf26..4fa892dd 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ setuptools.setup( ), }, name="libcst", - description="A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7 and 3.8 programs.", + description="A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs.", long_description=long_description, long_description_content_type="text/x-rst", url="https://github.com/Instagram/LibCST", @@ -42,7 +42,7 @@ setuptools.setup( "libcst.codemod.tests": ["*"], }, test_suite="libcst", - python_requires=">=3.6", + python_requires=">=3.7", setup_requires=["setuptools_scm"], install_requires=[dep.strip() for dep in open("requirements.txt").readlines()], extras_require={ @@ -62,9 +62,10 @@ setuptools.setup( classifiers=[ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming 
Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", ], zip_safe=False, # for mypy compatibility https://mypy.readthedocs.io/en/latest/installed_packages.html ) From cf16eccea4d7efc63227606b9e75cfb724f8275b Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 24 Mar 2022 17:21:08 -0400 Subject: [PATCH 232/632] Update relative import logic to match cpython (#660) * Always compute a module and package name * Update name_provider to correctly support __main__ (also updated the tests to use data_provider) * Update name_provider to correctly handle relative imports and package name * Update relative module resolution to work on package names * Use full_package_name in libcst.codemod.visitors.GatherImportsVisitor * Use full_package_name in libcst.codemod.visitors.RemovedNodeVisitor * Use full_package_name in libcst.codemod.visitors.AddImportsVisitor * Fix failing test * Fix typo in variable name * PR feedback * Force rebuild --- libcst/codemod/_cli.py | 14 +- libcst/codemod/visitors/_add_imports.py | 4 +- libcst/codemod/visitors/_gather_imports.py | 2 +- libcst/codemod/visitors/_imports.py | 6 +- libcst/codemod/visitors/_remove_imports.py | 8 +- .../visitors/tests/test_add_imports.py | 32 +++-- .../visitors/tests/test_gather_imports.py | 2 +- .../visitors/tests/test_remove_imports.py | 12 +- libcst/helpers/module.py | 52 +++---- libcst/helpers/tests/test_module.py | 82 +++++++---- libcst/metadata/name_provider.py | 62 ++++---- libcst/metadata/tests/test_name_provider.py | 133 ++++++++++-------- 12 files changed, 231 insertions(+), 178 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 4726a34f..7863ac39 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -251,16 +251,20 @@ def _execute_transform( # noqa: C901 scratch={}, ) - # attempt to work out the module and package name for this file - module_name_and_package = calculate_module_and_package( - config.repo_root, filename - ) - if module_name_and_package is not None: + # determine the module and package name for this file + try: + module_name_and_package = calculate_module_and_package( + config.repo_root or ".", filename + ) transformer.context = replace( transformer.context, full_module_name=module_name_and_package.name, full_package_name=module_name_and_package.package, ) + except ValueError as ex: + print( + f"Failed to determine module name for {filename}: {ex}", file=sys.stderr + ) # Run the transform, bail if we failed or if we aren't formatting code try: diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 64131dd6..95188561 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -122,7 +122,7 @@ class AddImportsVisitor(ContextAwareTransformer): raise Exception("Cannot import __future__ objects with aliases!") # Resolve relative imports if we have a module name - imps = [imp.resolve_relative(self.context.full_module_name) for imp in imps] + imps = [imp.resolve_relative(self.context.full_package_name) for imp in imps] # List of modules we need to ensure are imported self.module_imports: Set[str] = { @@ -215,7 +215,7 @@ class AddImportsVisitor(ContextAwareTransformer): # Get the module we're importing as a string, see if we have work to do. 
module = get_absolute_module_for_import( - self.context.full_module_name, updated_node + self.context.full_package_name, updated_node ) if ( module is None diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index 5d5a50f9..14760798 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -79,7 +79,7 @@ class GatherImportsVisitor(ContextAwareVisitor): self.all_imports.append(node) # Get the module we're importing as a string. - module = get_absolute_module_for_import(self.context.full_module_name, node) + module = get_absolute_module_for_import(self.context.full_package_name, node) if module is None: # Can't get the absolute import from relative, so we can't # support this. diff --git a/libcst/codemod/visitors/_imports.py b/libcst/codemod/visitors/_imports.py index 5a703112..8e8697e8 100644 --- a/libcst/codemod/visitors/_imports.py +++ b/libcst/codemod/visitors/_imports.py @@ -31,13 +31,13 @@ class ImportItem: def module(self) -> str: return "." * self.relative + self.module_name - def resolve_relative(self, base_module: Optional[str]) -> "ImportItem": + def resolve_relative(self, package_name: Optional[str]) -> "ImportItem": """Return an ImportItem with an absolute module name if possible.""" mod = self # `import ..a` -> `from .. import a` if mod.relative and mod.obj_name is None: mod = replace(mod, module_name="", obj_name=mod.module_name) - if base_module is None: + if package_name is None: return mod - m = get_absolute_module(base_module, mod.module_name or None, self.relative) + m = get_absolute_module(package_name, mod.module_name or None, self.relative) return mod if m is None else replace(mod, module_name=m, relative=0) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 4c53ef19..2a90a351 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -39,7 +39,7 @@ class RemovedNodeVisitor(ContextAwareVisitor): return module_name = get_absolute_module_for_import( - self.context.full_module_name, import_node + self.context.full_package_name, import_node ) if module_name is None: raise Exception("Cannot look up absolute module from relative import!") @@ -248,7 +248,9 @@ class RemoveImportsVisitor(ContextAwareTransformer): if isinstance(names, cst.ImportStar): # We don't handle removing this, so ignore it. return - module_name = get_absolute_module_for_import(context.full_module_name, node) + module_name = get_absolute_module_for_import( + context.full_package_name, node + ) if module_name is None: raise Exception("Cannot look up absolute module from relative import!") for import_alias in names: @@ -414,7 +416,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): # Make sure we actually know the absolute module. module_name = get_absolute_module_for_import( - self.context.full_module_name, updated_node + self.context.full_package_name, updated_node ) if module_name is None or module_name not in self.unused_obj_imports: # This node isn't on our list of todos, so let's bail. 
diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 6a88b335..e7a191a9 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -590,7 +590,9 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("a.b.c", "D", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_add_object_relative_modify_simple(self) -> None: @@ -621,7 +623,9 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("a.b.c", "D", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_import_order(self) -> None: @@ -644,7 +648,9 @@ class TestAddImportsCodemod(CodemodTest): ImportItem("a", "c", None), ImportItem("a", "d", "x"), ], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_add_explicit_relative(self) -> None: @@ -759,7 +765,9 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("c", "D", None, 2)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_add_object_explicit_relative_modify_simple(self) -> None: @@ -790,7 +798,9 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("c", "D", None, 2)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_add_object_resolve_explicit_relative_modify_simple(self) -> None: @@ -821,7 +831,9 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("c", "D", None, 2)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_add_object_resolve_dotted_relative_modify_simple(self) -> None: @@ -852,7 +864,9 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("..c", "D", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_import_in_docstring_module(self) -> None: @@ -873,5 +887,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("__future__", "annotations", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) diff --git a/libcst/codemod/visitors/tests/test_gather_imports.py b/libcst/codemod/visitors/tests/test_gather_imports.py index 3e5d6b99..4fbdbad2 100644 --- a/libcst/codemod/visitors/tests/test_gather_imports.py +++ b/libcst/codemod/visitors/tests/test_gather_imports.py @@ -12,7 +12,7 @@ from libcst.testing.utils import UnitTest class TestGatherImportsVisitor(UnitTest): def gather_imports(self, code: str) -> GatherImportsVisitor: transform_instance = GatherImportsVisitor( - CodemodContext(full_module_name="a.b.foobar") + CodemodContext(full_module_name="a.b.foobar", full_package_name="a.b") ) input_tree = 
parse_module(CodemodTest.make_fixture_data(code)) input_tree.visit(transform_instance) diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index 93e5d7a5..11db2e9f 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -419,7 +419,9 @@ class TestRemoveImportsCodemod(CodemodTest): before, after, [("a.b.c", "qux", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_dont_remove_inuse_importfrom_relative(self) -> None: @@ -446,7 +448,9 @@ class TestRemoveImportsCodemod(CodemodTest): before, after, [("a.b.c", "qux", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_dont_remove_wrong_importfrom_relative(self) -> None: @@ -473,7 +477,9 @@ class TestRemoveImportsCodemod(CodemodTest): before, after, [("a.b.d", "qux", None)], - context_override=CodemodContext(full_module_name="a.b.foobar"), + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), ) def test_remove_import_complex(self) -> None: diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index f9ba41aa..6a779594 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -35,49 +35,41 @@ def insert_header_comments(node: Module, comments: List[str]) -> Module: def get_absolute_module( - current_module: Optional[str], module_name: Optional[str], num_dots: int + current_package: Optional[str], module_name: Optional[str], num_dots: int ) -> Optional[str]: if num_dots == 0: # This is an absolute import, so the module is correct. return module_name - if current_module is None: + if current_package is None: # We don't actually have the current module available, so we can't compute # the absolute module from relative. return None - # We have the current module, as well as the relative, let's compute the base. - modules = current_module.split(".") - if len(modules) < num_dots: - # This relative import goes past the base of the repository, so we can't calculate it. + + # see importlib._bootstrap._resolve_name + # https://github.com/python/cpython/blob/3.10/Lib/importlib/_bootstrap.py#L902 + bits = current_package.rsplit(".", num_dots - 1) + if len(bits) < num_dots: return None - base_module = ".".join(modules[:-num_dots]) - # Finally, if the module name was supplied, append it to the end. - if module_name is not None: - # If we went all the way to the top, the base module should be empty, so we - # should return the relative bit as absolute. Otherwise, combine the base - # module and module name using a dot separator. - base_module = ( - f"{base_module}.{module_name}" if len(base_module) > 0 else module_name - ) - # If they tried to import all the way to the root, return None. Otherwise, - # return the module itself. - return base_module if len(base_module) > 0 else None + + base = bits[0] + return "{}.{}".format(base, module_name) if module_name else base def get_absolute_module_for_import( - current_module: Optional[str], import_node: ImportFrom + current_package: Optional[str], import_node: ImportFrom ) -> Optional[str]: # First, let's try to grab the module name, regardless of relative status. 
module = import_node.module module_name = get_full_name_for_node(module) if module is not None else None # Now, get the relative import location if it exists. num_dots = len(import_node.relative) - return get_absolute_module(current_module, module_name, num_dots) + return get_absolute_module(current_package, module_name, num_dots) def get_absolute_module_for_import_or_raise( - current_module: Optional[str], import_node: ImportFrom + current_package: Optional[str], import_node: ImportFrom ) -> str: - module = get_absolute_module_for_import(current_module, import_node) + module = get_absolute_module_for_import(current_package, import_node) if module is None: raise Exception(f"Unable to compute absolute module for {import_node}") return module @@ -89,22 +81,10 @@ class ModuleNameAndPackage: package: str -def calculate_module_and_package( - repo_root: Optional[str], filename: str -) -> Optional[ModuleNameAndPackage]: +def calculate_module_and_package(repo_root: str, filename: str) -> ModuleNameAndPackage: # Given an absolute repo_root and an absolute filename, calculate the # python module name for the file. - if repo_root is None: - # We don't have a repo root, so this is impossible to calculate. - return None - - try: - relative_filename = PurePath(filename).relative_to(repo_root) - except ValueError: - # This file seems to be out of the repo root. - return None - - # get rid of extension + relative_filename = PurePath(filename).relative_to(repo_root) relative_filename = relative_filename.with_suffix("") # handle special cases diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index da9dab71..0adc123e 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -71,38 +71,50 @@ class ModuleTest(UnitTest): ( # Simple imports that are already absolute. (None, "from a.b import c", "a.b"), - ("x.y.z", "from a.b import c", "a.b"), + ("x/y/z.py", "from a.b import c", "a.b"), + ("x/y/z/__init__.py", "from a.b import c", "a.b"), # Relative import that can't be resolved due to missing module. (None, "from ..w import c", None), # Relative import that goes past the module level. - ("x", "from ...y import z", None), - ("x.y.z", "from .....w import c", None), - ("x.y.z", "from ... import c", None), + ("x.py", "from ...y import z", None), + ("x/y/z.py", "from ... import c", None), + ("x/y/z.py", "from ...w import c", None), + ("x/y/z/__init__.py", "from .... import c", None), + ("x/y/z/__init__.py", "from ....w import c", None), # Correct resolution of absolute from relative modules. - ("x.y.z", "from . import c", "x.y"), - ("x.y.z", "from .. import c", "x"), - ("x.y.z", "from .w import c", "x.y.w"), - ("x.y.z", "from ..w import c", "x.w"), - ("x.y.z", "from ...w import c", "w"), + ("x/y/z.py", "from . import c", "x.y"), + ("x/y/z.py", "from .. import c", "x"), + ("x/y/z.py", "from .w import c", "x.y.w"), + ("x/y/z.py", "from ..w import c", "x.w"), + ("x/y/z/__init__.py", "from . import c", "x.y.z"), + ("x/y/z/__init__.py", "from .. import c", "x.y"), + ("x/y/z/__init__.py", "from ... 
import c", "x"), + ("x/y/z/__init__.py", "from .w import c", "x.y.z.w"), + ("x/y/z/__init__.py", "from ..w import c", "x.y.w"), + ("x/y/z/__init__.py", "from ...w import c", "x.w"), ) ) def test_get_absolute_module( self, - module: Optional[str], + filename: Optional[str], importfrom: str, output: Optional[str], ) -> None: + package = None + if filename is not None: + info = calculate_module_and_package(".", filename) + package = info.package node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) assert len(node.body) == 1, "Unexpected number of statements!" import_node = ensure_type(node.body[0], cst.ImportFrom) - self.assertEqual(get_absolute_module_for_import(module, import_node), output) + self.assertEqual(get_absolute_module_for_import(package, import_node), output) if output is None: with self.assertRaises(Exception): - get_absolute_module_for_import_or_raise(module, import_node) + get_absolute_module_for_import_or_raise(package, import_node) else: self.assertEqual( - get_absolute_module_for_import_or_raise(module, import_node), output + get_absolute_module_for_import_or_raise(package, import_node), output ) @data_provider( @@ -140,12 +152,6 @@ class ModuleTest(UnitTest): @data_provider( ( - # Providing no root should give back no module. - (None, "/some/dummy/file.py", None), - # Providing a file outside the root should give back no module. - ("/home/username/root", "/some/dummy/file.py", None), - ("/home/username/root/", "/some/dummy/file.py", None), - ("/home/username/root", "/home/username/file.py", None), # Various files inside the root should give back valid modules. ( "/home/username/root", @@ -173,17 +179,6 @@ class ModuleTest(UnitTest): "/home/username/root/some/dir/__main__.py", ModuleNameAndPackage("some.dir", "some.dir"), ), - # some windows tests - ( - "c:/Program Files/", - "d:/Program Files/some/dir/file.py", - None, - ), - ( - "c:/Program Files/other/", - "c:/Program Files/some/dir/file.py", - None, - ), ( "c:/Program Files/", "c:/Program Files/some/dir/file.py", @@ -198,10 +193,35 @@ class ModuleTest(UnitTest): ) def test_calculate_module_and_package( self, - repo_root: Optional[str], + repo_root: str, filename: str, module_and_package: Optional[ModuleNameAndPackage], ) -> None: self.assertEqual( calculate_module_and_package(repo_root, filename), module_and_package ) + + @data_provider( + ( + # Providing a file outside the root should raise an exception + ("/home/username/root", "/some/dummy/file.py"), + ("/home/username/root/", "/some/dummy/file.py"), + ("/home/username/root", "/home/username/file.py"), + # some windows tests + ( + "c:/Program Files/", + "d:/Program Files/some/dir/file.py", + ), + ( + "c:/Program Files/other/", + "c:/Program Files/some/dir/file.py", + ), + ) + ) + def test_invalid_module_and_package( + self, + repo_root: str, + filename: str, + ) -> None: + with self.assertRaises(ValueError): + calculate_module_and_package(repo_root, filename) diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 174aff73..00753504 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -4,12 +4,12 @@ # LICENSE file in the root directory of this source tree. 
import dataclasses -import re from pathlib import Path -from typing import Collection, List, Mapping, Optional, Pattern, Union +from typing import Collection, List, Mapping, Optional, Union import libcst as cst from libcst._metadata_dependent import MetadataDependent +from libcst.helpers.module import calculate_module_and_package, ModuleNameAndPackage from libcst.metadata.base_provider import BatchableMetadataProvider from libcst.metadata.scope_provider import ( QualifiedName, @@ -85,13 +85,6 @@ class QualifiedNameVisitor(cst.CSTVisitor): return True -DOT_PY: Pattern[str] = re.compile(r"(__init__)?\.py$") - - -def _module_name(path: str) -> Optional[str]: - return DOT_PY.sub("", path).replace("/", ".").rstrip(".") - - class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): """ Provide fully qualified names for CST nodes. Like :class:`QualifiedNameProvider`, @@ -118,16 +111,17 @@ class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedN @classmethod def gen_cache( cls, root_path: Path, paths: List[str], timeout: Optional[int] = None - ) -> Mapping[str, object]: - cache = {path: _module_name(path) for path in paths} + ) -> Mapping[str, ModuleNameAndPackage]: + cache = {path: calculate_module_and_package(".", path) for path in paths} return cache - def __init__(self, cache: str) -> None: + def __init__(self, cache: ModuleNameAndPackage) -> None: super().__init__(cache) - self.module_name: str = cache + self.module_name: str = cache.name + self.package_name: str = cache.package def visit_Module(self, node: cst.Module) -> bool: - visitor = FullyQualifiedNameVisitor(self, self.module_name) + visitor = FullyQualifiedNameVisitor(self, self.module_name, self.package_name) node.visit(visitor) self.set_metadata( node, @@ -138,20 +132,25 @@ class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedN class FullyQualifiedNameVisitor(cst.CSTVisitor): @staticmethod - def _fully_qualify_local(module_name: str, qname: QualifiedName) -> str: - name = qname.name - if not name.startswith("."): - # not a relative import - return f"{module_name}.{name}" + def _fully_qualify_local(module_name: str, package_name: str, name: str) -> str: + abs_name = name.lstrip(".") + num_dots = len(name) - len(abs_name) + # handle relative import + if num_dots > 0: + name = abs_name + # see importlib._bootstrap._resolve_name + # https://github.com/python/cpython/blob/3.10/Lib/importlib/_bootstrap.py#L902 + bits = package_name.rsplit(".", num_dots - 1) + if len(bits) < num_dots: + raise ImportError("attempted relative import beyond top-level package") + module_name = bits[0] - # relative import - name = name.lstrip(".") - parts_to_strip = len(qname.name) - len(name) - target_module = ".".join(module_name.split(".")[: -1 * parts_to_strip]) - return f"{target_module}.{name}" + return f"{module_name}.{name}" @staticmethod - def _fully_qualify(module_name: str, qname: QualifiedName) -> QualifiedName: + def _fully_qualify( + module_name: str, package_name: str, qname: QualifiedName + ) -> QualifiedName: if qname.source == QualifiedNameSource.BUILTIN: # builtins are already fully qualified return qname @@ -159,11 +158,16 @@ class FullyQualifiedNameVisitor(cst.CSTVisitor): if qname.source == QualifiedNameSource.IMPORT and not name.startswith("."): # non-relative imports are already fully qualified return qname - new_name = FullyQualifiedNameVisitor._fully_qualify_local(module_name, qname) + new_name = FullyQualifiedNameVisitor._fully_qualify_local( + module_name, 
package_name, qname.name
+        )
         return dataclasses.replace(qname, name=new_name)
 
-    def __init__(self, provider: FullyQualifiedNameProvider, module_name: str) -> None:
+    def __init__(
+        self, provider: FullyQualifiedNameProvider, module_name: str, package_name: str
+    ) -> None:
         self.module_name = module_name
+        self.package_name = package_name
         self.provider = provider
 
     def on_visit(self, node: cst.CSTNode) -> bool:
@@ -172,7 +176,9 @@ class FullyQualifiedNameVisitor(cst.CSTVisitor):
             self.provider.set_metadata(
                 node,
                 {
-                    FullyQualifiedNameVisitor._fully_qualify(self.module_name, qname)
+                    FullyQualifiedNameVisitor._fully_qualify(
+                        self.module_name, self.package_name, qname
+                    )
                     for qname in qnames
                 },
             )
diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py
index 30a6acd8..a9c583c3 100644
--- a/libcst/metadata/tests/test_name_provider.py
+++ b/libcst/metadata/tests/test_name_provider.py
@@ -6,7 +6,7 @@
 from pathlib import Path
 from tempfile import TemporaryDirectory
 from textwrap import dedent
-from typing import Collection, Mapping, Optional, Set, Tuple
+from typing import Collection, Dict, Mapping, Optional, Set, Tuple
 
 import libcst as cst
 from libcst import ensure_type
@@ -19,7 +19,7 @@ from libcst.metadata import (
 )
 from libcst.metadata.full_repo_manager import FullRepoManager
 from libcst.metadata.name_provider import FullyQualifiedNameVisitor
-from libcst.testing.utils import UnitTest
+from libcst.testing.utils import data_provider, UnitTest
 
 
 def get_qualified_name_metadata_provider(
@@ -416,65 +416,85 @@ class QualifiedNameProviderTest(UnitTest):
 
 
 class FullyQualifiedNameProviderTest(UnitTest):
-    def test_builtins(self) -> None:
-        qnames = get_fully_qualified_names(
-            "test/module.py",
-            """
-            int(None)
-            """,
+    @data_provider(
+        (
+            # test module names
+            ("a/b/c.py", "", {"a.b.c": QualifiedNameSource.LOCAL}),
+            ("a/b.py", "", {"a.b": QualifiedNameSource.LOCAL}),
+            ("a.py", "", {"a": QualifiedNameSource.LOCAL}),
+            ("a/b/__init__.py", "", {"a.b": QualifiedNameSource.LOCAL}),
+            ("a/b/__main__.py", "", {"a.b": QualifiedNameSource.LOCAL}),
+            # test builtins
+            (
+                "test/module.py",
+                "int(None)",
+                {
+                    "test.module": QualifiedNameSource.LOCAL,
+                    "builtins.int": QualifiedNameSource.BUILTIN,
+                    "builtins.None": QualifiedNameSource.BUILTIN,
+                },
+            ),
+            # test imports
+            (
+                "some/test/module.py",
+                """
+                from a.b import c as d
+                from . import rel
+                from .lol import rel2
+                from .. import thing as rel3
+                d, rel, rel2, rel3
+                """,
+                {
+                    "some.test.module": QualifiedNameSource.LOCAL,
+                    "a.b.c": QualifiedNameSource.IMPORT,
+                    "some.test.rel": QualifiedNameSource.IMPORT,
+                    "some.test.lol.rel2": QualifiedNameSource.IMPORT,
+                    "some.thing": QualifiedNameSource.IMPORT,
+                },
+            ),
+            # test more imports
+            (
+                "some/test/module/__init__.py",
+                """
+                from . 
import rel + from .lol import rel2 + rel, rel2 + """, + { + "some.test.module": QualifiedNameSource.LOCAL, + "some.test.module.rel": QualifiedNameSource.IMPORT, + "some.test.module.lol.rel2": QualifiedNameSource.IMPORT, + }, + ), + # test locals + ( + "some/test/module.py", + """ + class X: + a: X + """, + { + "some.test.module": QualifiedNameSource.LOCAL, + "some.test.module.X": QualifiedNameSource.LOCAL, + "some.test.module.X.a": QualifiedNameSource.LOCAL, + }, + ), ) - module_name = QualifiedName( - name="test.module", source=QualifiedNameSource.LOCAL - ) - self.assertIn(module_name, qnames) - qnames -= {module_name} - self.assertEqual( - {"builtins.int", "builtins.None"}, + ) + def test_qnames( + self, file: str, code: str, names: Dict[str, QualifiedNameSource] + ) -> None: + qnames = get_fully_qualified_names(file, code) + self.assertSetEqual( + set(names.keys()), {qname.name for qname in qnames}, ) for qname in qnames: - self.assertEqual(qname.source, QualifiedNameSource.BUILTIN, msg=f"{qname}") - - def test_imports(self) -> None: - qnames = get_fully_qualified_names( - "some/test/module.py", - """ - from a.b import c as d - from . import rel - from .lol import rel2 - from .. import thing as rel3 - d, rel, rel2, rel3 - """, - ) - module_name = QualifiedName( - name="some.test.module", source=QualifiedNameSource.LOCAL - ) - self.assertIn(module_name, qnames) - qnames -= {module_name} - self.assertEqual( - {"a.b.c", "some.test.rel", "some.test.lol.rel2", "some.thing"}, - {qname.name for qname in qnames}, - ) - for qname in qnames: - self.assertEqual(qname.source, QualifiedNameSource.IMPORT, msg=f"{qname}") - - def test_locals(self) -> None: - qnames = get_fully_qualified_names( - "some/test/module.py", - """ - class X: - a: X - """, - ) - self.assertEqual( - {"some.test.module", "some.test.module.X", "some.test.module.X.a"}, - {qname.name for qname in qnames}, - ) - for qname in qnames: - self.assertEqual(qname.source, QualifiedNameSource.LOCAL, msg=f"{qname}") + self.assertEqual(qname.source, names[qname.name], msg=f"{qname}") def test_local_qualification(self) -> None: - base_module = "some.test.module" + module_name = "some.test.module" + package_name = "some.test" for (name, expected) in [ (".foo", "some.test.foo"), ("..bar", "some.bar"), @@ -483,8 +503,7 @@ class FullyQualifiedNameProviderTest(UnitTest): with self.subTest(name=name): self.assertEqual( FullyQualifiedNameVisitor._fully_qualify_local( - base_module, - QualifiedName(name=name, source=QualifiedNameSource.LOCAL), + module_name, package_name, name ), expected, ) From 489d812064d5915358f1cfd56154bcd28b96b755 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Fri, 25 Mar 2022 05:30:12 -0400 Subject: [PATCH 233/632] Pin jinja2 to a version that jimmy's fork of sphinx works with (#666) --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index dbebc185..24331a99 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,3 +16,4 @@ ufmt==1.3 usort==1.0.0rc1 setuptools-rust>=0.12.1 slotscheck>=0.7.1 +jinja2==3.0.3 From f027b84366194b9da5f2ccb5d665d82b4d50dea1 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 7 Apr 2022 09:49:33 -0700 Subject: [PATCH 234/632] Always use ... in stubs, not pass (#670) Confirmed that the CI failures are happening against trunk. 
I'm attempting to fix by upgrading black Merging since this is a trivial change --- libcst/codemod/visitors/tests/test_apply_type_annotations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 9a3e59d7..13b0f9f2 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -295,7 +295,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): "with_nested_import": ( """ def foo(x: django.http.response.HttpResponse) -> str: - pass + ... """, """ def foo(x) -> str: From 954bd99d8a37f3923528d560d9a948b4ac0dd130 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 7 Apr 2022 11:33:18 -0700 Subject: [PATCH 235/632] Bump black to latest to address failures in Python 3.9/3.10 (#672) * Bump black to latest to address failures in Python 3.9/3.10 * Rerun ufmt to adapt to new black version --- libcst/_parser/parso/pgen2/grammar_parser.py | 12 ++++-------- libcst/_parser/parso/python/token.py | 1 - libcst/_parser/parso/utils.py | 4 ++-- libcst/codemod/commands/rename.py | 13 +++++-------- requirements-dev.txt | 2 +- 5 files changed, 12 insertions(+), 20 deletions(-) diff --git a/libcst/_parser/parso/pgen2/grammar_parser.py b/libcst/_parser/parso/pgen2/grammar_parser.py index 5d0f2229..0d30199d 100644 --- a/libcst/_parser/parso/pgen2/grammar_parser.py +++ b/libcst/_parser/parso/pgen2/grammar_parser.py @@ -93,14 +93,10 @@ class GrammarParser: def _parse_items(self): # items: item+ a, b = self._parse_item() - while ( - self.type - in ( - PythonTokenTypes.NAME, - PythonTokenTypes.STRING, - ) - or self.value in ("(", "[") - ): + while self.type in ( + PythonTokenTypes.NAME, + PythonTokenTypes.STRING, + ) or self.value in ("(", "["): c, d = self._parse_item() # Need to end on the next item. b.add_arc(c) diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index ea357874..83731192 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -26,7 +26,6 @@ try: ERRORTOKEN: TokenType = native_token_type.ERRORTOKEN ERROR_DEDENT: TokenType = native_token_type.ERROR_DEDENT - except ImportError: from libcst._parser.parso.python.py_token import ( # noqa F401 PythonTokenTypes, diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py index 0e4b4949..7814876c 100644 --- a/libcst/_parser/parso/utils.py +++ b/libcst/_parser/parso/utils.py @@ -114,11 +114,11 @@ def python_bytes_to_unicode( return b"utf-8" # pyre-ignore Pyre can't see that Union[str, bytes] conforms to AnyStr. 
- first_two_match = re.match(br"(?:[^\n]*\n){0,2}", source) + first_two_match = re.match(rb"(?:[^\n]*\n){0,2}", source) if first_two_match is None: return encoding first_two_lines = first_two_match.group(0) - possible_encoding = re.search(br"coding[=:]\s*([-\w.]+)", first_two_lines) + possible_encoding = re.search(rb"coding[=:]\s*([-\w.]+)", first_two_lines) if possible_encoding: return possible_encoding.group(1) else: diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 4b525ab3..6e7db0f4 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -259,14 +259,11 @@ class RenameCommand(VisitorBasedCodemodCommand): inside_import_statement: bool = not self.get_metadata( QualifiedNameProvider, original_node, set() ) - if ( - QualifiedNameProvider.has_name( - self, - original_node, - self.old_name, - ) - or (inside_import_statement and full_replacement_name == self.new_name) - ): + if QualifiedNameProvider.has_name( + self, + original_node, + self.old_name, + ) or (inside_import_statement and full_replacement_name == self.new_name): new_value, new_attr = self.new_module, self.new_mod_or_obj if not inside_import_statement: self.scheduled_removals.add(original_node.value) diff --git a/requirements-dev.txt b/requirements-dev.txt index 24331a99..640b8af7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -black==21.10b0 +black==22.3.0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8 From 869af036e0fa5f5deb8f22c43085b51391c3c8bf Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Thu, 7 Apr 2022 15:12:30 -0700 Subject: [PATCH 236/632] Preserve as-imports when merging type annotations. (#664) * Preserve as-imports when merging type annotations. Fixes #661 * fix test and bad rebase * ufmt --- .../codemod/visitors/_apply_type_annotations.py | 12 +++++++++--- .../tests/test_apply_type_annotations.py | 17 +++++++++++++++++ 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index fe74b391..d5647798 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -369,8 +369,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): return (relative_prefix + qualifier, target) def _handle_qualification_and_should_qualify( - self, - qualified_name: str, + self, qualified_name: str, node: Optional[cst.CSTNode] = None ) -> bool: """ Based on a qualified name and the existing module imports, record that @@ -384,10 +383,15 @@ class TypeCollector(m.MatcherDecoratableVisitor): if module in self.existing_imports: return True else: + if node and isinstance(node, cst.Name) and node.value != target: + asname = node.value + else: + asname = None AddImportsVisitor.add_needed_import( self.context, module, target, + asname=asname, ) return False return False @@ -407,7 +411,9 @@ class TypeCollector(m.MatcherDecoratableVisitor): qualified_name, dequalified_node, ) = self._get_qualified_name_and_dequalified_node(node) - should_qualify = self._handle_qualification_and_should_qualify(qualified_name) + should_qualify = self._handle_qualification_and_should_qualify( + qualified_name, node + ) self.annotations.names.add(qualified_name) if should_qualify: return node diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 13b0f9f2..196cb383 100644 --- 
a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -292,6 +292,23 @@ class TestApplyAnnotationsVisitor(CodemodTest): return returns_baz() """, ), + "with_as_import": ( + """ + from bar import A as B + + def foo(x: B): ... + """, + """ + def foo(x): + pass + """, + """ + from bar import A as B + + def foo(x: B): + pass + """, + ), "with_nested_import": ( """ def foo(x: django.http.response.HttpResponse) -> str: From 0a8ae91d39caa6065f920f290c1195ab1c217a61 Mon Sep 17 00:00:00 2001 From: wiyr Date: Fri, 8 Apr 2022 23:25:15 +0800 Subject: [PATCH 237/632] fix qualified name get bug (#669) * fix qualified name get bug * added unittest --- libcst/metadata/scope_provider.py | 5 ++++- libcst/metadata/tests/test_scope_provider.py | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 0a64c5a8..b826ec25 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -304,7 +304,10 @@ class ImportAssignment(Assignment): if eval_alias is not None: as_name = eval_alias if full_name.startswith(as_name): - remaining_name = full_name.split(as_name, 1)[1].lstrip(".") + remaining_name = full_name.split(as_name, 1)[1] + if remaining_name and not remaining_name.startswith("."): + continue + remaining_name = remaining_name.lstrip(".") results.add( QualifiedName( f"{real_name}.{remaining_name}" diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index ded9ab1e..0a541146 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -944,6 +944,25 @@ class ScopeProviderTest(UnitTest): {QualifiedName("f4..f5..C", QualifiedNameSource.LOCAL)}, ) + def test_get_qualified_names_for_the_same_prefix(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + from a import b, bc + bc() + """ + ) + call = ensure_type( + ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Expr + ).value, + cst.Call, + ) + module_scope = scopes[m] + self.assertEqual( + module_scope.get_qualified_names_for(call.func), + {QualifiedName("a.bc", QualifiedNameSource.IMPORT)}, + ) + def test_get_qualified_names_for_dotted_imports(self) -> None: m, scopes = get_scope_metadata_provider( """ From e6f208c7db07563a18a6066ab3deb5ca50a8ce7e Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Tue, 12 Apr 2022 12:12:55 -0700 Subject: [PATCH 238/632] Qualify imported symbols when the dequalified form would cause a conflict (#674) * Qualify imported symbols when the dequalified form would cause a conflict. Adds a preliminary pass that scans the stub file for all imported symbols, and collects the ones that cannot be safely dequalified. 
Fixes #673 * review fixes * handle symbol conflicts between the stub and the main file * fix type errors --- .../visitors/_apply_type_annotations.py | 285 +++++++++++++++--- libcst/codemod/visitors/_gather_imports.py | 12 + .../tests/test_apply_type_annotations.py | 44 +++ 3 files changed, 304 insertions(+), 37 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index d5647798..934e1b40 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -3,6 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +from collections import defaultdict from dataclasses import dataclass from typing import Dict, List, Optional, Sequence, Set, Tuple, Union @@ -14,6 +15,7 @@ from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._gather_global_names import GatherGlobalNamesVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor +from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_full_name_for_node from libcst.metadata import PositionProvider, QualifiedNameProvider @@ -29,6 +31,41 @@ StarParamType = Union[ ] +def _module_and_target(qualified_name: str) -> Tuple[str, str]: + relative_prefix = "" + while qualified_name.startswith("."): + relative_prefix += "." + qualified_name = qualified_name[1:] + split = qualified_name.rsplit(".", 1) + if len(split) == 1: + qualifier, target = "", split[0] + else: + qualifier, target = split + return (relative_prefix + qualifier, target) + + +def _get_unique_qualified_name( + visitor: m.MatcherDecoratableVisitor, node: cst.CSTNode +) -> str: + name = None + names = [q.name for q in visitor.get_metadata(QualifiedNameProvider, node)] + if len(names) == 0: + # we hit this branch if the stub is directly using a fully + # qualified name, which is not technically valid python but is + # convenient to allow. + name = get_full_name_for_node(node) + elif len(names) == 1 and isinstance(names[0], str): + name = names[0] + if name is None: + start = visitor.get_metadata(PositionProvider, node).start + raise ValueError( + "Could not resolve a unique qualified name for type " + + f"{get_full_name_for_node(node)} at {start.line}:{start.column}. " + + f"Candidate names were: {names!r}" + ) + return name + + def _get_import_alias_names( import_aliases: Sequence[cst.ImportAlias], ) -> Set[str]: @@ -186,6 +223,130 @@ class Annotations: self.typevars = {k: v for k, v in self.typevars.items() if k in self.names} +@dataclass(frozen=True) +class ImportedSymbol: + """Import of foo.Bar, where both foo and Bar are potentially aliases.""" + + module_name: str + module_alias: Optional[str] = None + target_name: Optional[str] = None + target_alias: Optional[str] = None + + @property + def symbol(self) -> Optional[str]: + return self.target_alias or self.target_name + + @property + def module_symbol(self) -> str: + return self.module_alias or self.module_name + + +class ImportedSymbolCollector(m.MatcherDecoratableVisitor): + """ + Collect imported symbols from a stub module. 
+ """ + + METADATA_DEPENDENCIES = ( + PositionProvider, + QualifiedNameProvider, + ) + + def __init__(self, existing_imports: Set[str], context: CodemodContext) -> None: + super().__init__() + self.existing_imports: Set[str] = existing_imports + self.imported_symbols: Dict[str, Set[ImportedSymbol]] = defaultdict(set) + + def visit_ClassDef(self, node: cst.ClassDef) -> None: + for base in node.bases: + value = base.value + if isinstance(value, NAME_OR_ATTRIBUTE): + self._handle_NameOrAttribute(value) + elif isinstance(value, cst.Subscript): + self._handle_Subscript(value) + + def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: + if node.returns is not None: + self._handle_Annotation(annotation=node.returns) + self._handle_Parameters(node.params) + + # pyi files don't support inner functions, return False to stop the traversal. + return False + + def visit_AnnAssign(self, node: cst.AnnAssign) -> None: + self._handle_Annotation(annotation=node.annotation) + + # Handler functions. + # + # These ultimately all call _handle_NameOrAttribute, which adds the + # qualified name to the list of imported symbols + + def _handle_NameOrAttribute( + self, + node: NameOrAttribute, + ) -> None: + obj = sym = None # keep pyre happy + if isinstance(node, cst.Name): + obj = None + sym = node.value + elif isinstance(node, cst.Attribute): + obj = node.value.value # pyre-ignore[16] + sym = node.attr.value + qualified_name = _get_unique_qualified_name(self, node) + module, target = _module_and_target(qualified_name) + if module in ("", "builtins"): + return + elif qualified_name not in self.existing_imports: + mod = ImportedSymbol( + module_name=module, + module_alias=obj if obj != module else None, + target_name=target, + target_alias=sym if sym != target else None, + ) + self.imported_symbols[sym].add(mod) + + def _handle_Index(self, slice: cst.Index) -> None: + value = slice.value + if isinstance(value, cst.Subscript): + self._handle_Subscript(value) + elif isinstance(value, cst.Attribute): + self._handle_NameOrAttribute(value) + + def _handle_Subscript(self, node: cst.Subscript) -> None: + value = node.value + if isinstance(value, NAME_OR_ATTRIBUTE): + self._handle_NameOrAttribute(value) + else: + raise ValueError("Expected any indexed type to have") + if _get_unique_qualified_name(self, node) in ("Type", "typing.Type"): + return + slice = node.slice + if isinstance(slice, tuple): + for item in slice: + if isinstance(item.slice.value, NAME_OR_ATTRIBUTE): + self._handle_NameOrAttribute(item.slice.value) + else: + if isinstance(item.slice, cst.Index): + self._handle_Index(item.slice) + elif isinstance(slice, cst.Index): + self._handle_Index(slice) + + def _handle_Annotation(self, annotation: cst.Annotation) -> None: + node = annotation.annotation + if isinstance(node, cst.Subscript): + self._handle_Subscript(node) + elif isinstance(node, NAME_OR_ATTRIBUTE): + self._handle_NameOrAttribute(node) + elif isinstance(node, cst.SimpleString): + pass + else: + raise ValueError(f"Unexpected annotation node: {node}") + + def _handle_Parameters(self, parameters: cst.Parameters) -> None: + for parameter in list(parameters.params): + if parameter.annotation is not None: + self._handle_Annotation(annotation=parameter.annotation) + + class TypeCollector(m.MatcherDecoratableVisitor): """ Collect type annotations from a stub module. 
@@ -201,6 +362,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): def __init__( self, existing_imports: Set[str], + module_imports: Dict[str, ImportItem], context: CodemodContext, ) -> None: super().__init__() @@ -212,6 +374,9 @@ class TypeCollector(m.MatcherDecoratableVisitor): # as well as module names, although downstream we effectively ignore # the module names as of the current implementation. self.existing_imports: Set[str] = existing_imports + # Module imports, gathered by prescanning the stub file to determine + # which modules need to be imported directly to qualify their symbols. + self.module_imports: Dict[str, ImportItem] = module_imports # Fields that help us track temporary state as we recurse self.qualifier: List[str] = [] self.current_assign: Optional[cst.Assign] = None # used to collect typevars @@ -323,36 +488,6 @@ class TypeCollector(m.MatcherDecoratableVisitor): ) -> None: self.annotations.finish() - def _get_unique_qualified_name( - self, - node: cst.CSTNode, - ) -> str: - name = None - names = [q.name for q in self.get_metadata(QualifiedNameProvider, node)] - if len(names) == 0: - # we hit this branch if the stub is directly using a fully - # qualified name, which is not technically valid python but is - # convenient to allow. - name = get_full_name_for_node(node) - elif len(names) == 1 and isinstance(names[0], str): - name = names[0] - if name is None: - start = self.get_metadata(PositionProvider, node).start - raise ValueError( - "Could not resolve a unique qualified name for type " - + f"{get_full_name_for_node(node)} at {start.line}:{start.column}. " - + f"Candidate names were: {names!r}" - ) - return name - - def _get_qualified_name_and_dequalified_node( - self, - node: Union[cst.Name, cst.Attribute], - ) -> Tuple[str, Union[cst.Name, cst.Attribute]]: - qualified_name = self._get_unique_qualified_name(node) - dequalified_node = node.attr if isinstance(node, cst.Attribute) else node - return qualified_name, dequalified_node - def _module_and_target( self, qualified_name: str, @@ -382,6 +517,16 @@ class TypeCollector(m.MatcherDecoratableVisitor): elif qualified_name not in self.existing_imports: if module in self.existing_imports: return True + elif module in self.module_imports: + m = self.module_imports[module] + if m.obj_name is None: + asname = m.alias + else: + asname = None + AddImportsVisitor.add_needed_import( + self.context, m.module_name, asname=asname + ) + return True else: if node and isinstance(node, cst.Name) and node.value != target: asname = node.value @@ -407,17 +552,18 @@ class TypeCollector(m.MatcherDecoratableVisitor): self, node: NameOrAttribute, ) -> Union[cst.Name, cst.Attribute]: - ( - qualified_name, - dequalified_node, - ) = self._get_qualified_name_and_dequalified_node(node) + qualified_name = _get_unique_qualified_name(self, node) should_qualify = self._handle_qualification_and_should_qualify( qualified_name, node ) self.annotations.names.add(qualified_name) if should_qualify: - return node + qualified_node = ( + cst.parse_module(qualified_name) if isinstance(node, cst.Name) else node + ) + return qualified_node # pyre-ignore[7] else: + dequalified_node = node.attr if isinstance(node, cst.Attribute) else node return dequalified_node def _handle_Index( @@ -443,7 +589,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): new_node = node.with_changes(value=self._handle_NameOrAttribute(value)) else: raise ValueError("Expected any indexed type to have") - if self._get_unique_qualified_name(node) in ("Type", "typing.Type"): + if 
_get_unique_qualified_name(self, node) in ("Type", "typing.Type"): # Note: we are intentionally not handling qualification of # anything inside `Type` because it's common to have nested # classes, which we cannot currently distinguish from classes @@ -679,7 +825,8 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.strict_annotation_matching = ( self.strict_annotation_matching or strict_annotation_matching ) - visitor = TypeCollector(existing_import_names, self.context) + module_imports = self._get_module_imports(stub, import_gatherer) + visitor = TypeCollector(existing_import_names, module_imports, self.context) cst.MetadataWrapper(stub).visit(visitor) self.annotations.update(visitor.annotations) @@ -697,6 +844,70 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): else: return tree + # helpers for collecting type information from the stub files + + def _get_module_imports( + self, stub: cst.Module, existing_import_gatherer: GatherImportsVisitor + ) -> Dict[str, ImportItem]: + """Returns a dict of modules that need to be imported to qualify symbols.""" + # We correlate all imported symbols, e.g. foo.bar.Baz, with a list of module + # and from imports. If the same unqualified symbol is used from different + # modules, we give preference to an explicit from-import if any, and qualify + # everything else by importing the module. + # + # e.g. the following stub: + # import foo as quux + # from bar import Baz as X + # def f(x: X) -> quux.X: ... + # will return {'foo': ImportItem("foo", "quux")}. When the apply type + # annotation visitor hits `quux.X` it will retrieve the canonical name + # `foo.X` and then note that `foo` is in the module imports map, so it will + # leave the symbol qualified. + import_gatherer = GatherImportsVisitor(CodemodContext()) + stub.visit(import_gatherer) + symbol_map = import_gatherer.symbol_mapping + existing_import_names = _get_imported_names( + existing_import_gatherer.all_imports + ) + symbol_collector = ImportedSymbolCollector(existing_import_names, self.context) + cst.MetadataWrapper(stub).visit(symbol_collector) + module_imports = {} + for sym, imported_symbols in symbol_collector.imported_symbols.items(): + existing = existing_import_gatherer.symbol_mapping.get(sym) + if existing and any( + s.module_name != existing.module_name for s in imported_symbols + ): + # If a symbol is imported in the main file, we have to qualify + # it when imported from a different module in the stub file. + used = True + elif len(imported_symbols) == 1: + # If we have a single use of a new symbol we can from-import it + continue + else: + # There are multiple occurrences in the stub file and none in + # the main file. At least one can be from-imported. + used = False + for imp_sym in imported_symbols: + if not imp_sym.symbol: + continue + imp = symbol_map.get(imp_sym.symbol) + if not used and imp and imp.module_name == imp_sym.module_name: + # We can only import a symbol directly once. + used = True + elif sym in existing_import_names: + if imp: + module_imports[imp.module_name] = imp + else: + imp = symbol_map.get(imp_sym.module_symbol) + if imp: + # imp will be None in corner cases like + # import foo.bar as Baz + # x: Baz + # which is technically valid python but nonsensical as a + # type annotation. Dropping it on the floor for now. 
+ module_imports[imp.module_name] = imp + return module_imports + # helpers for processing annotation nodes def _quote_future_annotations(self, annotation: cst.Annotation) -> cst.Annotation: # TODO: We probably want to make sure references to classes defined in the current diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index 14760798..e62e374a 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -8,6 +8,7 @@ from typing import Dict, List, Sequence, Set, Tuple, Union import libcst from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor +from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_absolute_module_for_import @@ -60,6 +61,8 @@ class GatherImportsVisitor(ContextAwareVisitor): self.alias_mapping: Dict[str, List[Tuple[str, str]]] = {} # Track all of the imports found in this transform self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] + # Track the import for every symbol introduced into the module + self.symbol_mapping: Dict[str, ImportItem] = {} def visit_Import(self, node: libcst.Import) -> None: # Track this import statement for later analysis. @@ -67,12 +70,15 @@ class GatherImportsVisitor(ContextAwareVisitor): for name in node.names: alias = name.evaluated_alias + imp = ImportItem(name.evaluated_name, alias=alias) if alias is not None: # Track this as an aliased module self.module_aliases[name.evaluated_name] = alias + self.symbol_mapping[alias] = imp else: # Get the module we're importing as a string. self.module_imports.add(name.evaluated_name) + self.symbol_mapping[name.evaluated_name] = imp def visit_ImportFrom(self, node: libcst.ImportFrom) -> None: # Track this import statement for later analysis. @@ -114,3 +120,9 @@ class GatherImportsVisitor(ContextAwareVisitor): return self.object_mapping[module].update(new_objects) + for ia in nodenames: + imp = ImportItem( + module, obj_name=ia.evaluated_name, alias=ia.evaluated_alias + ) + key = ia.evaluated_alias or ia.evaluated_name + self.symbol_mapping[key] = imp diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 196cb383..385ab7fa 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -309,6 +309,50 @@ class TestApplyAnnotationsVisitor(CodemodTest): pass """, ), + "with_conflicting_imported_symbols": ( + """ + import a.foo as bar + from b.c import Baz as B + import d + + def f(a: d.A, b: B) -> bar.B: ... + """, + """ + def f(a, b): + pass + """, + """ + import a.foo as bar + from b.c import Baz as B + from d import A + + def f(a: A, b: B) -> bar.B: + pass + """, + ), + "with_conflicts_between_imported_and_existing_symbols": ( + """ + from a import A + from b import B + + def f(x: A, y: B) -> None: ... 
+ """, + """ + from b import A, B + + def f(x, y): + y = A(x) + z = B(y) + """, + """ + from b import A, B + import a + + def f(x: a.A, y: B) -> None: + y = A(x) + z = B(y) + """, + ), "with_nested_import": ( """ def foo(x: django.http.response.HttpResponse) -> str: From abc566cd4a2502cf7c93bfeb63d07d76b0a3cea3 Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Wed, 13 Apr 2022 14:23:21 -0700 Subject: [PATCH 239/632] add an always_qualify_annotations argument to the type annotation visitor (#676) --- .../visitors/_apply_type_annotations.py | 23 ++++++- .../tests/test_apply_type_annotations.py | 69 +++++++++++++++++++ 2 files changed, 89 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 934e1b40..7811aa63 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -721,6 +721,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations: bool = False, strict_posargs_matching: bool = True, strict_annotation_matching: bool = False, + always_qualify_annotations: bool = False, ) -> None: super().__init__(context) # Qualifier for storing the canonical name of the current function. @@ -734,6 +735,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.use_future_annotations = use_future_annotations self.strict_posargs_matching = strict_posargs_matching self.strict_annotation_matching = strict_annotation_matching + self.always_qualify_annotations = always_qualify_annotations # We use this to determine the end of the import block so that we can # insert top-level annotations. @@ -761,6 +763,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations: bool = False, strict_posargs_matching: bool = True, strict_annotation_matching: bool = False, + always_qualify_annotations: bool = False, ) -> None: """ Store a stub module in the :class:`~libcst.codemod.CodemodContext` so @@ -779,6 +782,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations, strict_posargs_matching, strict_annotation_matching, + always_qualify_annotations, ) def transform_module_impl( @@ -812,6 +816,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations, strict_posargs_matching, strict_annotation_matching, + always_qualify_annotations, ) = context_contents self.overwrite_existing_annotations = ( self.overwrite_existing_annotations or overwrite_existing_annotations @@ -825,6 +830,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.strict_annotation_matching = ( self.strict_annotation_matching or strict_annotation_matching ) + self.always_qualify_annotations = ( + self.always_qualify_annotations or always_qualify_annotations + ) module_imports = self._get_module_imports(stub, import_gatherer) visitor = TypeCollector(existing_import_names, module_imports, self.context) cst.MetadataWrapper(stub).visit(visitor) @@ -846,7 +854,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # helpers for collecting type information from the stub files - def _get_module_imports( + def _get_module_imports( # noqa: C901: too complex self, stub: cst.Module, existing_import_gatherer: GatherImportsVisitor ) -> Dict[str, ImportItem]: """Returns a dict of modules that need to be imported to qualify symbols.""" @@ -880,7 +888,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # If a symbol is imported in the main file, we 
have to qualify # it when imported from a different module in the stub file. used = True - elif len(imported_symbols) == 1: + elif len(imported_symbols) == 1 and not self.always_qualify_annotations: # If we have a single use of a new symbol we can from-import it continue else: @@ -891,7 +899,16 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): if not imp_sym.symbol: continue imp = symbol_map.get(imp_sym.symbol) - if not used and imp and imp.module_name == imp_sym.module_name: + if self.always_qualify_annotations and sym not in existing_import_names: + # Override 'always qualify' if this is a typing import, or + # the main file explicitly from-imports a symbol. + if imp and imp.module_name != "typing": + module_imports[imp.module_name] = imp + else: + imp = symbol_map.get(imp_sym.module_symbol) + if imp: + module_imports[imp.module_name] = imp + elif not used and imp and imp.module_name == imp_sym.module_name: # We can only import a symbol directly once. used = True elif sym in existing_import_names: diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 385ab7fa..aa05ab73 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1676,3 +1676,72 @@ class TestApplyAnnotationsVisitor(CodemodTest): self.assertEqual( any_changes_applied, visitor.annotation_counts.any_changes_applied() ) + + @data_provider( + { + "always_qualify": ( + """ + from a import A + import b + def f(x: A, y: b.B) -> None: ... + """, + """ + def f(x, y): + pass + """, + """ + import a + import b + + def f(x: a.A, y: b.B) -> None: + pass + """, + ), + "never_qualify_typing": ( + """ + from a import A + from b import B + from typing import List + + def f(x: List[A], y: B[A]) -> None: ... + """, + """ + def f(x, y): + pass + """, + """ + import a + import b + from typing import List + + def f(x: List[a.A], y: b.B[a.A]) -> None: + pass + """, + ), + "preserve_explicit_from_import": ( + """ + from a import A + import b + def f(x: A, y: b.B) -> None: ... + """, + """ + from b import B + def f(x, y): + pass + """, + """ + from b import B + import a + + def f(x: a.A, y: B) -> None: + pass + """, + ), + } + ) + def test_signature_matching_with_always_qualify( + self, stub: str, before: str, after: str + ) -> None: + self.run_test_case_with_flags( + stub=stub, before=before, after=after, always_qualify_annotations=True + ) From 6e0c63ae9cb97f3cf40143313783d05e8c699e35 Mon Sep 17 00:00:00 2001 From: toofar Date: Fri, 15 Apr 2022 12:13:40 +1200 Subject: [PATCH 240/632] rename codemod: Correct last renamed import from (#675) Correct the renamed import from structure when renaming the last imported name from a module. Given from a.b import qux print(qux) And providing old_name="a.b.qux" and new_name="a:b.qux" I expect the following output (as described in the command description): from a import b print(b.qux) But what I get is: from a import b.qux print(b.qux) It pulls the old name up into the new one. The provided test is the important part but I've attempted a fix too. I suspect there is a better one and that the special casing of the "this is that last name" situation shouldn't be needed. For instance there is import removing code in leave_Module and renaming the first of many names (as opposed to the last) happily adds a correct import line. I didn't manage to grok the code and all the concepts it requires to provide a better fix though.
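For reference, the behaviour described above can also be reproduced outside the test suite with a short driver along these lines (a sketch only, assuming RenameCommand accepts old_name/new_name as constructor keyword arguments, which is how the new test case below passes them through assertCodemod):

```
# Sketch: run the rename codemod over the example source directly.
# Assumes RenameCommand(context, old_name=..., new_name=...) as used by the test.
import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.commands.rename import RenameCommand

source = "from a.b import qux\n\nprint(qux)\n"
command = RenameCommand(CodemodContext(), old_name="a.b.qux", new_name="a:b.qux")
print(command.transform_module(cst.parse_module(source)).code)
# With this patch applied, the output should be:
#   from a import b
#
#   print(b.qux)
```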
This leaves the alias adjustments to the existing code and just does the module renaming in the special casing block. I don't know why scheduling removal of the updated node is required, it makes the tests pass though. --- libcst/codemod/commands/rename.py | 6 +++--- libcst/codemod/commands/tests/test_rename.py | 18 ++++++++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 6e7db0f4..62809cc9 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -206,11 +206,11 @@ class RenameCommand(VisitorBasedCodemodCommand): ] = self.gen_name_or_attr_node(replacement_obj) # Rename on the spot only if this is the only imported name under the module. if len(names) == 1: - self.bypass_import = True - return updated_node.with_changes( + updated_node = updated_node.with_changes( module=cst.parse_expression(replacement_module), - names=(cst.ImportAlias(name=new_import_alias_name),), ) + self.scheduled_removals.add(updated_node) + new_names.append(import_alias) # Or if the module name is to stay the same. elif replacement_module == imported_module_name: self.bypass_import = True diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index e0668bfb..f97062b7 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -660,3 +660,21 @@ class TestRenameCommand(CodemodTest): bar(42) """ self.assertCodemod(before, before, old_name="baz.bar", new_name="qux.bar") + + def test_rename_single_with_colon(self) -> None: + before = """ + from a.b import qux + + print(qux) + """ + after = """ + from a import b + + print(b.qux) + """ + self.assertCodemod( + before, + after, + old_name="a.b.qux", + new_name="a:b.qux", + ) From c30bbcfa488380e30e9d33369f3ca73472938f52 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 4 May 2022 12:51:49 +0100 Subject: [PATCH 241/632] make sure ParserError's raw_line is zero-indexed (#681) --- libcst/_parser/tests/test_parse_errors.py | 2 ++ native/libcst/src/parser/errors.rs | 8 ++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index f36d08e7..b39ea20e 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -172,6 +172,8 @@ class ParseErrorsTest(UnitTest): ) -> None: with self.assertRaises(cst.ParserSyntaxError) as cm: parse_fn() + # make sure str() doesn't blow up + self.assertIn("Syntax Error", str(cm.exception)) if not is_native(): self.assertEqual(str(cm.exception), expected) diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index aae7fdb6..cc58bfa2 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -49,16 +49,20 @@ mod py_error { } _ => vec![""], }; - let (line, col) = match &e { + let (mut line, mut col) = match &e { ParserError::ParserError(err, ..)
=> { (err.location.start_pos.line, err.location.start_pos.column) } _ => (0, 0), }; + if line + 1 > lines.len() { + line = lines.len() - 1; + col = 0; + } let kwargs = [ ("message", e.to_string().into_py(py)), ("lines", lines.into_py(py)), - ("raw_line", line.into_py(py)), + ("raw_line", (line + 1).into_py(py)), ("raw_column", col.into_py(py)), ] .into_py_dict(py); From c6559671aa6895db3450a4fc741578e7184b29ee Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 4 May 2022 09:50:26 -0400 Subject: [PATCH 242/632] Consider access information when computing qualified names for nodes (#682) * Write a test case showing qualified names for shadowed assignments * Consider accesses when looking up names of nodes * Fix format * Fix typecheck --- libcst/metadata/scope_provider.py | 16 ++++++++++ libcst/metadata/tests/test_name_provider.py | 34 ++++++++++++++++++++- 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index b826ec25..f4db3e89 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -543,6 +543,22 @@ class Scope(abc.ABC): considering it could be a complex type annotation in the string which is hard to resolve, e.g. ``List[Union[int, str]]``. """ + + # if this node is an access we know the assignment and we can use that name + node_accesses = { + access + for all_accesses in self._accesses.values() + for access in all_accesses + if access.node == node + } + if node_accesses: + return { + qname + for access in node_accesses + for referent in access.referents + for qname in referent.get_qualified_names_for(referent.name) + } + results = set() full_name = get_full_name_for_node(node) if full_name is None: diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index a9c583c3..9b0b409f 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -102,7 +102,8 @@ class QualifiedNameProviderTest(UnitTest): cls = ensure_type(m.body[1], cst.ClassDef) f = ensure_type(cls.body.body[0], cst.FunctionDef) self.assertEqual( - names[ensure_type(f.returns, cst.Annotation).annotation], set() + names[ensure_type(f.returns, cst.Annotation).annotation], + {QualifiedName("a.b.c", QualifiedNameSource.IMPORT)}, ) c_call = ensure_type( @@ -414,6 +415,37 @@ class QualifiedNameProviderTest(UnitTest): }, ) + def test_shadowed_assignments(self) -> None: + m, names = get_qualified_name_metadata_provider( + """ + from lib import a,b,c + a = a + class Test: + b = b + def func(): + c = c + """ + ) + + # pyre-fixme[53]: Captured variable `names` is not annotated. 
+ def test_name(node: cst.CSTNode, qnames: Set[QualifiedName]) -> None: + name = ensure_type( + ensure_type(node, cst.SimpleStatementLine).body[0], cst.Assign + ).value + self.assertEqual(names[name], qnames) + + test_name(m.body[1], {QualifiedName("lib.a", QualifiedNameSource.IMPORT)}) + + cls = ensure_type(m.body[2], cst.ClassDef) + test_name( + cls.body.body[0], {QualifiedName("lib.b", QualifiedNameSource.IMPORT)} + ) + + func = ensure_type(m.body[3], cst.FunctionDef) + test_name( + func.body.body[0], {QualifiedName("lib.c", QualifiedNameSource.IMPORT)} + ) + class FullyQualifiedNameProviderTest(UnitTest): @data_provider( From 460698a205cd7ba543fc01c5442b15bb71ab36b3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 4 May 2022 17:20:59 +0100 Subject: [PATCH 243/632] bump version to 0.4.2 --- CHANGELOG.md | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e020592a..8db45730 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,37 @@ +# 0.4.2 - 2022-05-04 + +## New Contributors +* @stanislavlevin made their first contribution in https://github.com/Instagram/LibCST/pull/650 +* @dmitryvinn made their first contribution in https://github.com/Instagram/LibCST/pull/655 +* @wiyr made their first contribution in https://github.com/Instagram/LibCST/pull/669 +* @toofar made their first contribution in https://github.com/Instagram/LibCST/pull/675 + +## Fixed +* native: Avoid crashing by making IntoPy conversion fallible by @zsol in https://github.com/Instagram/LibCST/pull/639 +* native: make sure ParserError's line is zero-indexed by @zsol in https://github.com/Instagram/LibCST/pull/681 +* Fix space validation for AsName and Await by @zsol in https://github.com/Instagram/LibCST/pull/641 +* Qualified Name Provider: Fix returned qname for symbols that are prefixes of each other by @wiyr in https://github.com/Instagram/LibCST/pull/669 +* Rename Codemod: Correct last renamed import from by @toofar in https://github.com/Instagram/LibCST/pull/675 +* Many changes to the Apply Type Comments codemod: + * Allow for skipping quotes when applying type comments by @stroxler in https://github.com/Instagram/LibCST/pull/644 + * Port pyre fixes by @stroxler in https://github.com/Instagram/LibCST/pull/651 + * Preserve as-imports when merging type annotations. by @martindemello in https://github.com/Instagram/LibCST/pull/664 + * Qualify imported symbols when the dequalified form would cause a conflict by @martindemello in https://github.com/Instagram/LibCST/pull/674 + * Add an argument to always qualify imported type annotations. 
by @martindemello in https://github.com/Instagram/LibCST/pull/676 + +## Added + +* Create an AddTrailingCommas codemod by @stroxler in https://github.com/Instagram/LibCST/pull/643 +* Define gather global names visitor by @shannonzhu in https://github.com/Instagram/LibCST/pull/657 + +## Updated + +* Support module and package names in the codemod context by @lpetre in https://github.com/Instagram/LibCST/pull/662 +* Drop support for running libcst using a python 3.6 interpreter by @lpetre in https://github.com/Instagram/LibCST/pull/663 +* Update relative import logic to match cpython by @lpetre in https://github.com/Instagram/LibCST/pull/660 +* Scope Provider: Consider access information when computing qualified names for nodes by @lpetre in https://github.com/Instagram/LibCST/pull/682 + + # 0.4.1 - 2022-01-28 ## New Contributors From 149599ee88f70c845da2a2af63c3e3596bab4551 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Wed, 11 May 2022 07:42:37 -0400 Subject: [PATCH 244/632] Restore the 0.4.1 behavior for libcst.helpers.get_absolute_module (#684) --- .../codemod/commands/remove_unused_imports.py | 6 +- libcst/codemod/visitors/_add_imports.py | 4 +- libcst/codemod/visitors/_gather_imports.py | 6 +- libcst/codemod/visitors/_imports.py | 6 +- libcst/codemod/visitors/_remove_imports.py | 11 ++-- libcst/helpers/__init__.py | 6 ++ libcst/helpers/module.py | 57 +++++++++++++++++-- libcst/helpers/tests/test_module.py | 55 ++++++++++++++++-- 8 files changed, 130 insertions(+), 21 deletions(-) diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index e9938d8a..2e6beafa 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -9,7 +9,7 @@ from typing import Set, Tuple, Union from libcst import Import, ImportFrom, ImportStar, Module from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand from libcst.codemod.visitors import GatherCommentsVisitor, RemoveImportsVisitor -from libcst.helpers import get_absolute_module_for_import +from libcst.helpers import get_absolute_module_from_package_for_import from libcst.metadata import PositionProvider, ProviderT DEFAULT_SUPPRESS_COMMENT_REGEX = ( @@ -74,8 +74,8 @@ class RemoveUnusedImportsCommand(VisitorBasedCodemodCommand): asname=alias.evaluated_alias, ) else: - module_name = get_absolute_module_for_import( - self.context.full_module_name, node + module_name = get_absolute_module_from_package_for_import( + self.context.full_package_name, node ) if module_name is None: raise ValueError( diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 95188561..a3ca32f9 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -12,7 +12,7 @@ from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.codemod.visitors._imports import ImportItem -from libcst.helpers import get_absolute_module_for_import +from libcst.helpers import get_absolute_module_from_package_for_import class AddImportsVisitor(ContextAwareTransformer): @@ -214,7 +214,7 @@ class AddImportsVisitor(ContextAwareTransformer): return updated_node # Get the module we're importing as a string, see if we have work to do. 
- module = get_absolute_module_for_import( + module = get_absolute_module_from_package_for_import( self.context.full_package_name, updated_node ) if ( diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index e62e374a..4847afc1 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -9,7 +9,7 @@ import libcst from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor from libcst.codemod.visitors._imports import ImportItem -from libcst.helpers import get_absolute_module_for_import +from libcst.helpers import get_absolute_module_from_package_for_import class GatherImportsVisitor(ContextAwareVisitor): @@ -85,7 +85,9 @@ class GatherImportsVisitor(ContextAwareVisitor): self.all_imports.append(node) # Get the module we're importing as a string. - module = get_absolute_module_for_import(self.context.full_package_name, node) + module = get_absolute_module_from_package_for_import( + self.context.full_package_name, node + ) if module is None: # Can't get the absolute import from relative, so we can't # support this. diff --git a/libcst/codemod/visitors/_imports.py b/libcst/codemod/visitors/_imports.py index 8e8697e8..f3c1c305 100644 --- a/libcst/codemod/visitors/_imports.py +++ b/libcst/codemod/visitors/_imports.py @@ -6,7 +6,7 @@ from dataclasses import dataclass, replace from typing import Optional -from libcst.helpers import get_absolute_module +from libcst.helpers import get_absolute_module_from_package @dataclass(frozen=True) @@ -39,5 +39,7 @@ class ImportItem: mod = replace(mod, module_name="", obj_name=mod.module_name) if package_name is None: return mod - m = get_absolute_module(package_name, mod.module_name or None, self.relative) + m = get_absolute_module_from_package( + package_name, mod.module_name or None, self.relative + ) return mod if m is None else replace(mod, module_name=m, relative=0) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 2a90a351..55940127 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -9,7 +9,10 @@ import libcst as cst from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor -from libcst.helpers import get_absolute_module_for_import, get_full_name_for_node +from libcst.helpers import ( + get_absolute_module_from_package_for_import, + get_full_name_for_node, +) from libcst.metadata import Assignment, ProviderT, ScopeProvider @@ -38,7 +41,7 @@ class RemovedNodeVisitor(ContextAwareVisitor): # We don't handle removing this, so ignore it. return - module_name = get_absolute_module_for_import( + module_name = get_absolute_module_from_package_for_import( self.context.full_package_name, import_node ) if module_name is None: @@ -248,7 +251,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): if isinstance(names, cst.ImportStar): # We don't handle removing this, so ignore it. return - module_name = get_absolute_module_for_import( + module_name = get_absolute_module_from_package_for_import( context.full_package_name, node ) if module_name is None: @@ -415,7 +418,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): return updated_node # Make sure we actually know the absolute module. 
- module_name = get_absolute_module_for_import( + module_name = get_absolute_module_from_package_for_import( self.context.full_package_name, updated_node ) if module_name is None or module_name not in self.unused_obj_imports: diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index 6f0db041..c7fdf9b1 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -19,6 +19,9 @@ from libcst.helpers.module import ( get_absolute_module, get_absolute_module_for_import, get_absolute_module_for_import_or_raise, + get_absolute_module_from_package, + get_absolute_module_from_package_for_import, + get_absolute_module_from_package_for_import_or_raise, insert_header_comments, ModuleNameAndPackage, ) @@ -28,6 +31,9 @@ __all__ = [ "get_absolute_module", "get_absolute_module_for_import", "get_absolute_module_for_import_or_raise", + "get_absolute_module_from_package", + "get_absolute_module_from_package_for_import", + "get_absolute_module_from_package_for_import_or_raise", "get_full_name_for_node", "get_full_name_for_node_or_raise", "ensure_type", diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 6a779594..f9961807 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -35,6 +35,55 @@ def insert_header_comments(node: Module, comments: List[str]) -> Module: def get_absolute_module( + current_module: Optional[str], module_name: Optional[str], num_dots: int +) -> Optional[str]: + if num_dots == 0: + # This is an absolute import, so the module is correct. + return module_name + if current_module is None: + # We don't actually have the current module available, so we can't compute + # the absolute module from relative. + return None + # We have the current module, as well as the relative, let's compute the base. + modules = current_module.split(".") + if len(modules) < num_dots: + # This relative import goes past the base of the repository, so we can't calculate it. + return None + base_module = ".".join(modules[:-num_dots]) + # Finally, if the module name was supplied, append it to the end. + if module_name is not None: + # If we went all the way to the top, the base module should be empty, so we + # should return the relative bit as absolute. Otherwise, combine the base + # module and module name using a dot separator. + base_module = ( + f"{base_module}.{module_name}" if len(base_module) > 0 else module_name + ) + # If they tried to import all the way to the root, return None. Otherwise, + # return the module itself. + return base_module if len(base_module) > 0 else None + + +def get_absolute_module_for_import( + current_module: Optional[str], import_node: ImportFrom +) -> Optional[str]: + # First, let's try to grab the module name, regardless of relative status. + module = import_node.module + module_name = get_full_name_for_node(module) if module is not None else None + # Now, get the relative import location if it exists. 
+ num_dots = len(import_node.relative) + return get_absolute_module(current_module, module_name, num_dots) + + +def get_absolute_module_for_import_or_raise( + current_module: Optional[str], import_node: ImportFrom +) -> str: + module = get_absolute_module_for_import(current_module, import_node) + if module is None: + raise Exception(f"Unable to compute absolute module for {import_node}") + return module + + +def get_absolute_module_from_package( current_package: Optional[str], module_name: Optional[str], num_dots: int ) -> Optional[str]: if num_dots == 0: @@ -55,7 +104,7 @@ def get_absolute_module( return "{}.{}".format(base, module_name) if module_name else base -def get_absolute_module_for_import( +def get_absolute_module_from_package_for_import( current_package: Optional[str], import_node: ImportFrom ) -> Optional[str]: # First, let's try to grab the module name, regardless of relative status. @@ -63,13 +112,13 @@ def get_absolute_module_for_import( module_name = get_full_name_for_node(module) if module is not None else None # Now, get the relative import location if it exists. num_dots = len(import_node.relative) - return get_absolute_module(current_package, module_name, num_dots) + return get_absolute_module_from_package(current_package, module_name, num_dots) -def get_absolute_module_for_import_or_raise( +def get_absolute_module_from_package_for_import_or_raise( current_package: Optional[str], import_node: ImportFrom ) -> str: - module = get_absolute_module_for_import(current_package, import_node) + module = get_absolute_module_from_package_for_import(current_package, import_node) if module is None: raise Exception(f"Unable to compute absolute module for {import_node}") return module diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index 0adc123e..ea8bf43f 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -11,6 +11,8 @@ from libcst.helpers.module import ( calculate_module_and_package, get_absolute_module_for_import, get_absolute_module_for_import_or_raise, + get_absolute_module_from_package_for_import, + get_absolute_module_from_package_for_import_or_raise, insert_header_comments, ModuleNameAndPackage, ) @@ -67,6 +69,44 @@ class ModuleTest(UnitTest): insert_header_comments(node, inserted_comments).code, expected_code ) + @data_provider( + ( + # Simple imports that are already absolute. + (None, "from a.b import c", "a.b"), + ("x.y.z", "from a.b import c", "a.b"), + # Relative import that can't be resolved due to missing module. + (None, "from ..w import c", None), + # Relative import that goes past the module level. + ("x", "from ...y import z", None), + ("x.y.z", "from .....w import c", None), + ("x.y.z", "from ... import c", None), + # Correct resolution of absolute from relative modules. + ("x.y.z", "from . import c", "x.y"), + ("x.y.z", "from .. import c", "x"), + ("x.y.z", "from .w import c", "x.y.w"), + ("x.y.z", "from ..w import c", "x.w"), + ("x.y.z", "from ...w import c", "w"), + ) + ) + def test_get_absolute_module( + self, + module: Optional[str], + importfrom: str, + output: Optional[str], + ) -> None: + node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) + assert len(node.body) == 1, "Unexpected number of statements!" 
+ import_node = ensure_type(node.body[0], cst.ImportFrom) + + self.assertEqual(get_absolute_module_for_import(module, import_node), output) + if output is None: + with self.assertRaises(Exception): + get_absolute_module_for_import_or_raise(module, import_node) + else: + self.assertEqual( + get_absolute_module_for_import_or_raise(module, import_node), output + ) + @data_provider( ( # Simple imports that are already absolute. @@ -94,7 +134,7 @@ class ModuleTest(UnitTest): ("x/y/z/__init__.py", "from ...w import c", "x.w"), ) ) - def test_get_absolute_module( + def test_get_absolute_module_from_package( self, filename: Optional[str], importfrom: str, @@ -108,13 +148,20 @@ class ModuleTest(UnitTest): assert len(node.body) == 1, "Unexpected number of statements!" import_node = ensure_type(node.body[0], cst.ImportFrom) - self.assertEqual(get_absolute_module_for_import(package, import_node), output) + self.assertEqual( + get_absolute_module_from_package_for_import(package, import_node), output + ) if output is None: with self.assertRaises(Exception): - get_absolute_module_for_import_or_raise(package, import_node) + get_absolute_module_from_package_for_import_or_raise( + package, import_node + ) else: self.assertEqual( - get_absolute_module_for_import_or_raise(package, import_node), output + get_absolute_module_from_package_for_import_or_raise( + package, import_node + ), + output, ) @data_provider( From 69a4f4e3a319665494886dedab48a39cb7b75858 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 11 May 2022 13:01:31 +0100 Subject: [PATCH 245/632] bump version to 0.4.3 --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8db45730..8cf7863e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +# 0.4.3 - 2022-05-11 + +## Fixed +* Restore the 0.4.1 behavior for libcst.helpers.get_absolute_module by @lpetre in https://github.com/Instagram/LibCST/pull/684 + + # 0.4.2 - 2022-05-04 ## New Contributors From 5900a4ecd653487b1a30ee56ce010585bd7586c7 Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Mon, 16 May 2022 19:11:11 +0100 Subject: [PATCH 246/632] Add package links to PyPI (#688) --- setup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.py b/setup.py index 4fa892dd..294f1b32 100644 --- a/setup.py +++ b/setup.py @@ -34,6 +34,10 @@ setuptools.setup( long_description=long_description, long_description_content_type="text/x-rst", url="https://github.com/Instagram/LibCST", + project_urls={ + "Changelog": "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md", + "Documentation": "https://libcst.readthedocs.io/en/latest/", + }, license="MIT", packages=setuptools.find_packages(), package_data={ From f3811a0e3f45133b18c6e0281784304270113416 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 29 May 2022 10:27:11 +0100 Subject: [PATCH 247/632] native: add overall benchmark (#692) * Fix benchmarks on windows * add benchmark to cover parse_module --- native/libcst/benches/parser_benchmark.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index ad96d867..a00bf6ff 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -17,6 +17,11 @@ use libcst_native::{ parse_module, parse_tokens_without_whitespace, tokenize, Codegen, Config, Inflate, }; +#[cfg(not(windows))] +const NEWLINE: &str = "\n"; +#[cfg(windows)] +const NEWLINE: &str = "\r\n"; + fn 
load_all_fixtures() -> String { let mut path = PathBuf::from(file!()); path.pop(); @@ -38,7 +43,7 @@ fn load_all_fixtures() -> String { let path = file.unwrap().path(); std::fs::read_to_string(&path).expect("reading_file") }) - .join("\n") + .join(NEWLINE) } pub fn inflate_benchmarks(c: &mut Criterion) { @@ -82,7 +87,7 @@ pub fn parser_benchmarks(c: &mut Criterion) { pub fn codegen_benchmarks(c: &mut Criterion) { let input = load_all_fixtures(); - let m = parse_module(&input, None).expect("parse failed"); + let m = parse_module(input.as_str(), None).expect("parse failed"); let mut group = c.benchmark_group("codegen"); group.bench_function("all", |b| { b.iter(|| { @@ -102,9 +107,19 @@ pub fn tokenize_benchmarks(c: &mut Criterion) { group.finish(); } +pub fn parse_into_cst_benchmarks(c: &mut Criterion) { + let fixture = load_all_fixtures(); + let mut group = c.benchmark_group("parse_into_cst"); + group.measurement_time(Duration::from_secs(15)); + group.bench_function("all", |b| { + b.iter(|| black_box(parse_module(&fixture, None))) + }); + group.finish(); +} + criterion_group!( name=benches; config = Criterion::default().with_measurement(CyclesPerByte); - targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks + targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks, parse_into_cst_benchmarks ); criterion_main!(benches); From c00d2249c7c62a2b06f1f0d0975d38b8b97fea39 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jun 2022 20:04:53 +0100 Subject: [PATCH 248/632] Bump crossbeam-deque from 0.8.0 to 0.8.1 in /native (#693) Bumps [crossbeam-deque](https://github.com/crossbeam-rs/crossbeam) from 0.8.0 to 0.8.1. - [Release notes](https://github.com/crossbeam-rs/crossbeam/releases) - [Changelog](https://github.com/crossbeam-rs/crossbeam/blob/master/CHANGELOG.md) - [Commits](https://github.com/crossbeam-rs/crossbeam/compare/crossbeam-deque-0.8.0...crossbeam-deque-0.8.1) --- updated-dependencies: - dependency-name: crossbeam-deque dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index fbeaeb78..b0746c0b 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -150,9 +150,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" +checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" dependencies = [ "cfg-if", "crossbeam-epoch", From 8469407206f637235e815ae3ecb72d74edb8e95b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jun 2022 20:23:08 +0100 Subject: [PATCH 249/632] Bump regex from 1.5.4 to 1.5.5 in /native (#694) Bumps [regex](https://github.com/rust-lang/regex) from 1.5.4 to 1.5.5. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.5.4...1.5.5) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index b0746c0b..09358dc2 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -608,9 +608,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.4" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" dependencies = [ "aho-corasick", "memchr", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index d2916e17..1acff0d1 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "4b146b4b78a80c07 chic = "1.2.2" itertools = "0.10.0" once_cell = "1.5.2" -regex = "1.5.4" +regex = "1.5.5" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] From e454cf9f1e08558515d2a8db61586d5ac1a67a1a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jun 2022 20:42:19 +0100 Subject: [PATCH 250/632] Bump crossbeam-utils from 0.8.5 to 0.8.8 in /native (#695) Bumps [crossbeam-utils](https://github.com/crossbeam-rs/crossbeam) from 0.8.5 to 0.8.8. - [Release notes](https://github.com/crossbeam-rs/crossbeam/releases) - [Changelog](https://github.com/crossbeam-rs/crossbeam/blob/master/CHANGELOG.md) - [Commits](https://github.com/crossbeam-rs/crossbeam/compare/crossbeam-utils-0.8.5...crossbeam-utils-0.8.8) --- updated-dependencies: - dependency-name: crossbeam-utils dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 09358dc2..93d5f72b 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -174,9 +174,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" dependencies = [ "cfg-if", "lazy_static", From 380f045fe05bfdc6a0555f8e36a0c1c406ca1b77 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 7 Jun 2022 11:08:37 +0100 Subject: [PATCH 251/632] parser: use references instead of smart pointers for Tokens (#691) * Add cst_node proc macro * Split CST nodes into Deflated/Inflated versions --- native/Cargo.lock | 57 +- native/libcst/Cargo.toml | 2 +- native/libcst/README.md | 25 +- native/libcst/benches/parser_benchmark.rs | 11 +- native/libcst/src/lib.rs | 31 +- native/libcst/src/nodes/expression.rs | 1372 +++++++----- native/libcst/src/nodes/mod.rs | 83 +- native/libcst/src/nodes/module.rs | 36 +- native/libcst/src/nodes/op.rs | 403 ++-- native/libcst/src/nodes/statement.rs | 1634 ++++++++------ native/libcst/src/nodes/traits.rs | 46 +- native/libcst/src/parser/grammar.rs | 1898 +++++++---------- native/libcst/src/parser/mod.rs | 1 + native/libcst/src/parser/numbers.rs | 2 +- native/libcst_derive/Cargo.toml | 3 + native/libcst_derive/src/cstnode.rs | 455 ++++ native/libcst_derive/src/inflate.rs | 16 +- native/libcst_derive/src/lib.rs | 18 +- .../libcst_derive/src/parenthesized_node.rs | 64 +- .../libcst_derive/tests/pass/minimal_cst.rs | 127 ++ native/libcst_derive/tests/pass/simple.rs | 54 + 21 files changed, 3775 insertions(+), 2563 deletions(-) create mode 100644 native/libcst_derive/src/cstnode.rs create mode 100644 native/libcst_derive/tests/pass/minimal_cst.rs create mode 100644 native/libcst_derive/tests/pass/simple.rs diff --git a/native/Cargo.lock b/native/Cargo.lock index 93d5f72b..f5c747b3 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -216,6 +216,12 @@ version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +[[package]] +name = "glob" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" + [[package]] name = "half" version = "1.7.1" @@ -332,6 +338,7 @@ version = "0.1.0" dependencies = [ "quote", "syn", + "trybuild", ] [[package]] @@ -450,8 +457,9 @@ dependencies = [ [[package]] name = "peg" -version = "0.7.0" -source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af728fe826811af3b38c37e93de6d104485953ea373d656eebae53d6987fcd2c" dependencies = [ "peg-macros", "peg-runtime", @@ -459,8 +467,9 @@ dependencies = [ [[package]] name = "peg-macros" -version = "0.7.0" -source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +version = "0.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4536be147b770b824895cbad934fccce8e49f14b4c4946eaa46a6e4a12fcdc16" dependencies = [ "peg-runtime", "proc-macro2", @@ -469,8 +478,9 @@ dependencies = [ [[package]] name = "peg-runtime" -version = "0.7.0" -source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9b0efd3ba03c3a409d44d60425f279ec442bcf0b9e63ff4e410da31c8b0f69f" [[package]] name = "plotters" @@ -670,6 +680,9 @@ name = "serde" version = "1.0.126" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03" +dependencies = [ + "serde_derive", +] [[package]] name = "serde_cbor" @@ -720,6 +733,15 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "termcolor" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +dependencies = [ + "winapi-util", +] + [[package]] name = "textwrap" version = "0.11.0" @@ -759,6 +781,29 @@ dependencies = [ "serde_json", ] +[[package]] +name = "toml" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" +dependencies = [ + "serde", +] + +[[package]] +name = "trybuild" +version = "1.0.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d664de8ea7e531ad4c0f5a834f20b8cb2b8e6dfe88d05796ee7887518ed67b9" +dependencies = [ + "glob", + "lazy_static", + "serde", + "serde_json", + "termcolor", + "toml", +] + [[package]] name = "unicode-width" version = "0.1.8" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 1acff0d1..b70f1e8e 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -32,7 +32,7 @@ trace = ["peg/trace"] paste = "1.0.4" pyo3 = { version = "0.14.4", optional = true } thiserror = "1.0.23" -peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "4b146b4b78a80c07e43d7ace2d97f65bfde279a8" } +peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" once_cell = "1.5.2" diff --git a/native/libcst/README.md b/native/libcst/README.md index 42eb2f6c..2af0f1f0 100644 --- a/native/libcst/README.md +++ b/native/libcst/README.md @@ -47,6 +47,23 @@ implemented via a macro in `libcst_derive`. ## Hacking +### Nodes +All CST nodes are marked with the `#[cst_node]` proc macro, which duplicates the node types; for a node named `Foo`, there's: + +- `DeflatedFoo`, which is the output of the parsing phase and isn't exposed through the + API of the crate. + - it has two lifetime parameters: `'r` (or `'input` in the grammar) is the lifetime of + `Token` references, and `'a` is the lifetime of `str` slices from the original input + - `TokenRef` fields are contained here, while whitespace fields aren't + - if there aren't any fields that refer to other CST nodes or `TokenRef`s, there's an + extra (private) `_phantom` field that "contains" the two lifetime parameters (this + is to make the type parameters of all `DeflatedFoo` types uniform) + - it implements the `Inflate` trait, which converts `DeflatedFoo` into `Foo` +- `Foo`, which is what's publicly exposed in the crate and is the output of `Inflate`ing `DeflatedFoo`. 
+ - it only retains the second (`'a`) lifetime parameter of `DeflatedFoo` to refer back to slices of the original input string + - whitespace fields are contained here, but `TokenRef`s aren't + - `IntoPy` is implemented for it (assuming the `py` crate feature is enabled), which contains code to translate `Foo` back into a Python object; hence, the fields on `Foo` match the Python CST node implementations (barring fields marked with `#[skip_py]`) + ### Grammar The grammar is mostly a straightforward translation from the [CPython @@ -61,10 +78,10 @@ exceptions: mutually recursive rules, special `invalid_` rules, the `~` operator, terminating the parser early. -The PEG parser is run on a `Vec` of `Token`s, and tries its best to avoid allocating any -strings, working only with references. As such, the output nodes don't own any strings, -but refer to slices of the original input (hence the `'a` lifetime parameter on almost -all nodes). +The PEG parser is run on a `Vec` of `Token`s (more precisely `&'input Vec>`), +and tries its best to avoid allocating any strings, working only with references. As +such, the output nodes don't own any strings, but refer to slices of the original input +(hence the `'input, 'a` lifetime parameters on almost all nodes). ### Whitespace parsing diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index a00bf6ff..53ab2e17 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -49,12 +49,13 @@ fn load_all_fixtures() -> String { pub fn inflate_benchmarks(c: &mut Criterion) { let fixture = load_all_fixtures(); let tokens = tokenize(fixture.as_str()).expect("tokenize failed"); + let tokvec = tokens.clone().into(); let mut group = c.benchmark_group("inflate"); group.bench_function("all", |b| { b.iter_batched( || { let conf = Config::new(fixture.as_str(), &tokens); - let m = parse_tokens_without_whitespace(tokens.clone(), fixture.as_str(), None) + let m = parse_tokens_without_whitespace(&tokvec, fixture.as_str(), None) .expect("parse failed"); (conf, m) }, @@ -71,13 +72,13 @@ pub fn parser_benchmarks(c: &mut Criterion) { group.measurement_time(Duration::from_secs(15)); group.bench_function("all", |b| { b.iter_batched( - || tokenize(fixture.as_str()).expect("tokenize failed"), + || tokenize(fixture.as_str()).expect("tokenize failed").into(), |tokens| { - black_box(parse_tokens_without_whitespace( - tokens, + black_box(drop(parse_tokens_without_whitespace( + &tokens, fixture.as_str(), None, - )) + ))) }, BatchSize::SmallInput, ) diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index d2e475ab..6d2bdd99 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -11,10 +11,11 @@ pub use tokenizer::whitespace_parser::Config; use tokenizer::{whitespace_parser, TokConfig, Token, TokenIterator}; mod nodes; +use nodes::deflated::Module as DeflatedModule; pub use nodes::*; mod parser; -use parser::{ParserError, Result}; +use parser::{ParserError, Result, TokVec}; #[cfg(feature = "py")] pub mod py; @@ -32,15 +33,6 @@ pub fn tokenize(text: &str) -> Result> { .map_err(|err| ParserError::TokenizerError(err, text)) } -pub fn parse_tokens_without_whitespace<'a>( - tokens: Vec>, - module_text: &'a str, - encoding: Option<&str>, -) -> Result<'a, Module<'a>> { - parser::python::file(&tokens.into(), module_text, encoding) - .map_err(|err| ParserError::ParserError(err, module_text)) -} - pub fn parse_module<'a>( mut module_text: &'a str, encoding: Option<&str>, @@ 
@@ -51,14 +43,26 @@ pub fn parse_module<'a>( } let tokens = tokenize(module_text)?; let conf = whitespace_parser::Config::new(module_text, &tokens); - let m = parse_tokens_without_whitespace(tokens, module_text, encoding)?; + let tokvec = tokens.into(); + let m = parse_tokens_without_whitespace(&tokvec, module_text, encoding)?; Ok(m.inflate(&conf)?) } +pub fn parse_tokens_without_whitespace<'r, 'a>( + tokens: &'r TokVec<'a>, + module_text: &'a str, + encoding: Option<&str>, +) -> Result<'a, DeflatedModule<'r, 'a>> { + let m = parser::python::file(tokens, module_text, encoding) + .map_err(|err| ParserError::ParserError(err, module_text))?; + Ok(m) +} + pub fn parse_statement(text: &str) -> Result<Statement> { let tokens = tokenize(text)?; let conf = whitespace_parser::Config::new(text, &tokens); - let stm = parser::python::statement_input(&tokens.into(), text) + let tokvec = tokens.into(); + let stm = parser::python::statement_input(&tokvec, text) .map_err(|err| ParserError::ParserError(err, text))?; Ok(stm.inflate(&conf)?) } @@ -66,7 +70,8 @@ pub fn parse_statement(text: &str) -> Result<Statement> { pub fn parse_expression(text: &str) -> Result<Expression> { let tokens = tokenize(text)?; let conf = whitespace_parser::Config::new(text, &tokens); - let expr = parser::python::expression_input(&tokens.into(), text) + let tokvec = tokens.into(); + let expr = parser::python::expression_input(&tokvec, text) .map_err(|err| ParserError::ParserError(err, text))?; Ok(expr.inflate(&conf)?) } diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 9345b6c7..261bdd67 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -3,12 +3,14 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree.
-use std::{mem::swap, rc::Rc}; +use std::mem::swap; use crate::{ inflate_helpers::adjust_parameters_trailing_whitespace, nodes::{ - traits::{Inflate, ParenthesizedNode, Result, WithComma}, + op::*, + statement::*, + traits::{Inflate, ParenthesizedDeflatedNode, ParenthesizedNode, Result, WithComma}, whitespace::ParenthesizableWhitespace, Annotation, AssignEqual, AssignTargetExpression, BinaryOp, BooleanOp, Codegen, CodegenState, Colon, Comma, CompOp, Dot, UnaryOp, @@ -20,12 +22,11 @@ use crate::{ }; #[cfg(feature = "py")] use libcst_derive::TryIntoPy; -use libcst_derive::{Codegen, Inflate, ParenthesizedNode}; +use libcst_derive::{cst_node, Codegen, Inflate, ParenthesizedDeflatedNode, ParenthesizedNode}; -type TokenRef<'a> = Rc>; +type TokenRef<'r, 'a> = &'r Token<'a>; -#[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Default)] pub struct Parameters<'a> { pub params: Vec>, pub star_arg: Option>, @@ -46,20 +47,38 @@ impl<'a> Parameters<'a> { } } -impl<'a> Inflate<'a> for Parameters<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.posonly_params = self.posonly_params.inflate(config)?; - self.posonly_ind = self.posonly_ind.inflate(config)?; - self.params = self.params.inflate(config)?; - self.star_arg = self.star_arg.inflate(config)?; - self.kwonly_params = self.kwonly_params.inflate(config)?; - self.star_kwarg = self.star_kwarg.inflate(config)?; - Ok(self) +impl<'r, 'a> DeflatedParameters<'r, 'a> { + pub fn is_empty(&self) -> bool { + self.params.is_empty() + && self.star_arg.is_none() + && self.kwonly_params.is_empty() + && self.star_kwarg.is_none() + && self.posonly_params.is_empty() + && self.posonly_ind.is_none() } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +impl<'r, 'a> Inflate<'a> for DeflatedParameters<'r, 'a> { + type Inflated = Parameters<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let posonly_params = self.posonly_params.inflate(config)?; + let posonly_ind = self.posonly_ind.inflate(config)?; + let params = self.params.inflate(config)?; + let star_arg = self.star_arg.inflate(config)?; + let kwonly_params = self.kwonly_params.inflate(config)?; + let star_kwarg = self.star_kwarg.inflate(config)?; + Ok(Self::Inflated { + params, + star_arg, + kwonly_params, + star_kwarg, + posonly_params, + posonly_ind, + }) + } +} + +#[cst_node(Inflate)] pub enum StarArg<'a> { Star(Box>), Param(Box>), @@ -119,8 +138,7 @@ impl<'a> Codegen<'a> for Parameters<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ParamSlash<'a> { pub comma: Option>, } @@ -136,15 +154,15 @@ impl<'a> ParamSlash<'a> { } } -impl<'a> Inflate<'a> for ParamSlash<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.comma = self.comma.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedParamSlash<'r, 'a> { + type Inflated = ParamSlash<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let comma = self.comma.inflate(config)?; + Ok(Self::Inflated { comma }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ParamStar<'a> { pub comma: Comma<'a>, } @@ -156,26 +174,31 @@ impl<'a> Codegen<'a> for ParamStar<'a> { } } -impl<'a> Inflate<'a> for ParamStar<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.comma = self.comma.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedParamStar<'r, 'a> 
{ + type Inflated = ParamStar<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let comma = self.comma.inflate(config)?; + Ok(Self::Inflated { comma }) } } -#[derive(Debug, Eq, PartialEq, Default, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode, Default)] pub struct Name<'a> { pub value: &'a str, pub lpar: Vec>, pub rpar: Vec>, } -impl<'a> Inflate<'a> for Name<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedName<'r, 'a> { + type Inflated = Name<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value: self.value, + lpar, + rpar, + }) } } @@ -187,8 +210,7 @@ impl<'a> Codegen<'a> for Name<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Param<'a> { pub name: Name<'a>, pub annotation: Option>, @@ -205,25 +227,34 @@ pub struct Param<'a> { pub(crate) star_tok: Option>, } -impl<'a> Inflate<'a> for Param<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - // TODO: whitespace_after_param missing? - self.name = self.name.inflate(config)?; - self.annotation = self.annotation.inflate(config)?; - self.equal = self.equal.inflate(config)?; - self.default = self.default.inflate(config)?; - self.comma = self.comma.inflate(config)?; - if let Some(star_tok) = self.star_tok.as_mut() { - self.whitespace_after_star = parse_parenthesizable_whitespace( - config, - &mut star_tok.whitespace_after.borrow_mut(), - )?; - } - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedParam<'r, 'a> { + type Inflated = Param<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let name = self.name.inflate(config)?; + let annotation = self.annotation.inflate(config)?; + let equal = self.equal.inflate(config)?; + let default = self.default.inflate(config)?; + let comma = self.comma.inflate(config)?; + let whitespace_after_star = if let Some(star_tok) = self.star_tok.as_mut() { + parse_parenthesizable_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())? 
+ } else { + Default::default() + }; + let whitespace_after_param = Default::default(); // TODO + Ok(Self::Inflated { + name, + annotation, + equal, + default, + comma, + star: self.star, + whitespace_after_star, + whitespace_after_param, + }) } } -impl<'a> Default for Param<'a> { +impl<'r, 'a> Default for DeflatedParam<'r, 'a> { fn default() -> Self { Self { name: Default::default(), @@ -232,8 +263,6 @@ impl<'a> Default for Param<'a> { default: None, comma: None, star: Some(""), // Note: this preserves a quirk of the pure python parser - whitespace_after_param: Default::default(), - whitespace_after_star: Default::default(), star_tok: None, } } @@ -280,8 +309,7 @@ impl<'a> Param<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Arg<'a> { pub value: Expression<'a>, pub keyword: Option>, @@ -294,20 +322,29 @@ pub struct Arg<'a> { pub(crate) star_tok: Option>, } -impl<'a> Inflate<'a> for Arg<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - if let Some(star_tok) = self.star_tok.as_mut() { - self.whitespace_after_star = parse_parenthesizable_whitespace( - config, - &mut star_tok.whitespace_after.borrow_mut(), - )?; - } - self.keyword = self.keyword.inflate(config)?; - self.equal = self.equal.inflate(config)?; - self.value = self.value.inflate(config)?; - self.comma = self.comma.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedArg<'r, 'a> { + type Inflated = Arg<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let whitespace_after_star = if let Some(star_tok) = self.star_tok.as_mut() { + parse_parenthesizable_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())? + } else { + Default::default() + }; + let keyword = self.keyword.inflate(config)?; + let equal = self.equal.inflate(config)?; + let value = self.value.inflate(config)?; + let comma = self.comma.inflate(config)?; // whitespace_after_arg is handled in Call - Ok(self) + let whitespace_after_arg = Default::default(); + Ok(Self::Inflated { + value, + keyword, + equal, + comma, + star: self.star, + whitespace_after_star, + whitespace_after_arg, + }) } } @@ -335,8 +372,8 @@ impl<'a> Arg<'a> { } } -impl<'a> WithComma<'a> for Arg<'a> { - fn with_comma(self, c: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedArg<'r, 'a> { + fn with_comma(self, c: DeflatedComma<'r, 'a>) -> Self { Self { comma: Some(c), ..self @@ -344,8 +381,8 @@ impl<'a> WithComma<'a> for Arg<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] +#[derive(Default)] pub struct LeftParen<'a> { /// Any space that appears directly after this left parenthesis. pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -360,18 +397,19 @@ impl<'a> Codegen<'a> for LeftParen<'a> { } } -impl<'a> Inflate<'a> for LeftParen<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedLeftParen<'r, 'a> { + type Inflated = LeftParen<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.lpar_tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { whitespace_after }) } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] +#[derive(Default)] pub struct RightParen<'a> { /// Any space that appears directly before this right parenthesis. 
pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -386,18 +424,18 @@ impl<'a> Codegen<'a> for RightParen<'a> { } } -impl<'a> Inflate<'a> for RightParen<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedRightParen<'r, 'a> { + type Inflated = RightParen<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.rpar_tok).whitespace_before.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { whitespace_before }) } } -#[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode, Codegen, Inflate)] pub enum Expression<'a> { Name(Box>), Ellipsis(Box>), @@ -430,8 +468,7 @@ pub enum Expression<'a> { NamedExpr(Box>), } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Ellipsis<'a> { pub lpar: Vec>, pub rpar: Vec>, @@ -444,16 +481,16 @@ impl<'a> Codegen<'a> for Ellipsis<'a> { }) } } -impl<'a> Inflate<'a> for Ellipsis<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedEllipsis<'r, 'a> { + type Inflated = Ellipsis<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { lpar, rpar }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Integer<'a> { /// A string representation of the integer, such as ``"100000"`` or /// ``"100_000"``. @@ -470,16 +507,20 @@ impl<'a> Codegen<'a> for Integer<'a> { } } -impl<'a> Inflate<'a> for Integer<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedInteger<'r, 'a> { + type Inflated = Integer<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value: self.value, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Float<'a> { /// A string representation of the floating point number, such as ```"0.05"``, /// ``".050"``, or ``"5e-2"``. 
@@ -496,16 +537,20 @@ impl<'a> Codegen<'a> for Float<'a> { } } -impl<'a> Inflate<'a> for Float<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedFloat<'r, 'a> { + type Inflated = Float<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value: self.value, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Imaginary<'a> { /// A string representation of the complex number, such as ``"2j"`` pub value: &'a str, @@ -521,16 +566,20 @@ impl<'a> Codegen<'a> for Imaginary<'a> { } } -impl<'a> Inflate<'a> for Imaginary<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedImaginary<'r, 'a> { + type Inflated = Imaginary<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value: self.value, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Comparison<'a> { pub left: Box>, pub comparisons: Vec>, @@ -548,18 +597,23 @@ impl<'a> Codegen<'a> for Comparison<'a> { }) } } -impl<'a> Inflate<'a> for Comparison<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.left = self.left.inflate(config)?; - self.comparisons = self.comparisons.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedComparison<'r, 'a> { + type Inflated = Comparison<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let left = self.left.inflate(config)?; + let comparisons = self.comparisons.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + left, + comparisons, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct UnaryOperation<'a> { pub operator: UnaryOp<'a>, pub expression: Box>, @@ -576,18 +630,23 @@ impl<'a> Codegen<'a> for UnaryOperation<'a> { } } -impl<'a> Inflate<'a> for UnaryOperation<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.operator = self.operator.inflate(config)?; - self.expression = self.expression.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedUnaryOperation<'r, 'a> { + type Inflated = UnaryOperation<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let operator = self.operator.inflate(config)?; + let expression = self.expression.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + operator, + expression, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct BinaryOperation<'a> { pub left: Box>, pub operator: BinaryOp<'a>, @@ -606,19 +665,25 @@ 
impl<'a> Codegen<'a> for BinaryOperation<'a> { } } -impl<'a> Inflate<'a> for BinaryOperation<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.left = self.left.inflate(config)?; - self.operator = self.operator.inflate(config)?; - self.right = self.right.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedBinaryOperation<'r, 'a> { + type Inflated = BinaryOperation<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let left = self.left.inflate(config)?; + let operator = self.operator.inflate(config)?; + let right = self.right.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + left, + operator, + right, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct BooleanOperation<'a> { pub left: Box>, pub operator: BooleanOp<'a>, @@ -637,19 +702,25 @@ impl<'a> Codegen<'a> for BooleanOperation<'a> { } } -impl<'a> Inflate<'a> for BooleanOperation<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.left = self.left.inflate(config)?; - self.operator = self.operator.inflate(config)?; - self.right = self.right.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedBooleanOperation<'r, 'a> { + type Inflated = BooleanOperation<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let left = self.left.inflate(config)?; + let operator = self.operator.inflate(config)?; + let right = self.right.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + left, + operator, + right, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Call<'a> { pub func: Box>, pub args: Vec>, @@ -662,21 +733,22 @@ pub struct Call<'a> { pub(crate) rpar_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Call<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.func = self.func.inflate(config)?; - self.whitespace_after_func = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedCall<'r, 'a> { + type Inflated = Call<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let func = self.func.inflate(config)?; + let whitespace_after_func = parse_parenthesizable_whitespace( config, &mut (*self.lpar_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_before_args = parse_parenthesizable_whitespace( + let whitespace_before_args = parse_parenthesizable_whitespace( config, &mut (*self.lpar_tok).whitespace_after.borrow_mut(), )?; - self.args = self.args.inflate(config)?; + let mut args = self.args.inflate(config)?; - if let Some(arg) = self.args.last_mut() { + if let Some(arg) = args.last_mut() { if arg.comma.is_none() { arg.whitespace_after_arg = parse_parenthesizable_whitespace( config, @@ -684,9 +756,16 @@ impl<'a> Inflate<'a> for Call<'a> { )?; } } - self.rpar = self.rpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + func, + args, + lpar, + rpar, + whitespace_after_func, + whitespace_before_args, + }) } } @@ -706,8 +785,7 @@ impl<'a> Codegen<'a> for 
Call<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Attribute<'a> { pub value: Box>, pub attr: Name<'a>, @@ -716,14 +794,21 @@ pub struct Attribute<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for Attribute<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.value = self.value.inflate(config)?; - self.dot = self.dot.inflate(config)?; - self.attr = self.attr.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedAttribute<'r, 'a> { + type Inflated = Attribute<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let value = self.value.inflate(config)?; + let dot = self.dot.inflate(config)?; + let attr = self.attr.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value, + attr, + dot, + lpar, + rpar, + }) } } @@ -737,24 +822,22 @@ impl<'a> Codegen<'a> for Attribute<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, Inflate)] pub enum NameOrAttribute<'a> { N(Box>), A(Box>), } -impl<'a> std::convert::From> for Expression<'a> { - fn from(x: NameOrAttribute<'a>) -> Self { +impl<'r, 'a> std::convert::From> for DeflatedExpression<'r, 'a> { + fn from(x: DeflatedNameOrAttribute<'r, 'a>) -> Self { match x { - NameOrAttribute::N(n) => Self::Name(n), - NameOrAttribute::A(a) => Self::Attribute(a), + DeflatedNameOrAttribute::N(n) => Self::Name(n), + DeflatedNameOrAttribute::A(a) => Self::Attribute(a), } } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ComparisonTarget<'a> { pub operator: CompOp<'a>, pub comparator: Expression<'a>, @@ -767,16 +850,19 @@ impl<'a> Codegen<'a> for ComparisonTarget<'a> { } } -impl<'a> Inflate<'a> for ComparisonTarget<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.operator = self.operator.inflate(config)?; - self.comparator = self.comparator.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedComparisonTarget<'r, 'a> { + type Inflated = ComparisonTarget<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let operator = self.operator.inflate(config)?; + let comparator = self.comparator.inflate(config)?; + Ok(Self::Inflated { + operator, + comparator, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct StarredElement<'a> { pub value: Box>, pub comma: Option>, @@ -787,25 +873,33 @@ pub struct StarredElement<'a> { pub(crate) star_tok: TokenRef<'a>, } -impl<'a> StarredElement<'a> { - pub fn inflate_element(mut self, config: &Config<'a>, is_last: bool) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.whitespace_before_value = parse_parenthesizable_whitespace( +impl<'r, 'a> DeflatedStarredElement<'r, 'a> { + pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result> { + let lpar = self.lpar.inflate(config)?; + let whitespace_before_value = parse_parenthesizable_whitespace( config, &mut (*self.star_tok).whitespace_after.borrow_mut(), )?; - self.value = self.value.inflate(config)?; - self.comma = if is_last { + let value = self.value.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + let comma = if is_last { self.comma.map(|c| 
c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(self) + Ok(StarredElement { + value, + comma, + lpar, + rpar, + whitespace_before_value, + }) } } -impl<'a> Inflate<'a> for StarredElement<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedStarredElement<'r, 'a> { + type Inflated = StarredElement<'a>; + fn inflate(self, config: &Config<'a>) -> Result { self.inflate_element(config, false) } } @@ -824,7 +918,7 @@ impl<'a> Codegen<'a> for StarredElement<'a> { } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone)] +#[cst_node(NoIntoPy)] pub enum Element<'a> { Simple { value: Expression<'a>, @@ -857,11 +951,12 @@ impl<'a> Element<'a> { state.add_token(if default_comma_whitespace { ", " } else { "," }); } } - - pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result { +} +impl<'r, 'a> DeflatedElement<'r, 'a> { + pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result> { Ok(match self { - Self::Starred(s) => Self::Starred(Box::new(s.inflate_element(config, is_last)?)), - Self::Simple { value, comma } => Self::Simple { + Self::Starred(s) => Element::Starred(Box::new(s.inflate_element(config, is_last)?)), + Self::Simple { value, comma } => Element::Simple { value: value.inflate(config)?, comma: if is_last { comma.map(|c| c.inflate_before(config)).transpose()? @@ -873,8 +968,8 @@ impl<'a> Element<'a> { } } -impl<'a> WithComma<'a> for Element<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedElement<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { let comma = Some(comma); match self { Self::Simple { value, .. } => Self::Simple { comma, value }, @@ -885,38 +980,44 @@ impl<'a> WithComma<'a> for Element<'a> { } } } -impl<'a> std::convert::From> for Element<'a> { - fn from(e: Expression<'a>) -> Self { +impl<'r, 'a> std::convert::From> for DeflatedElement<'r, 'a> { + fn from(e: DeflatedExpression<'r, 'a>) -> Self { match e { - Expression::StarredElement(e) => Element::Starred(e), - value => Element::Simple { value, comma: None }, + DeflatedExpression::StarredElement(e) => Self::Starred(e), + value => Self::Simple { value, comma: None }, } } } -#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode, Default)] pub struct Tuple<'a> { pub elements: Vec>, pub lpar: Vec>, pub rpar: Vec>, } -impl<'a> Inflate<'a> for Tuple<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result> { - self.lpar = self.lpar.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedTuple<'r, 'a> { + type Inflated = Tuple<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; let len = self.elements.len(); - self.elements = self + let elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - if !self.elements.is_empty() { + let rpar = if !elements.is_empty() { // rpar only has whitespace if elements is non empty - self.rpar = self.rpar.inflate(config)?; - } - Ok(self) + self.rpar.inflate(config)? 
+ } else { + vec![Default::default()] + }; + Ok(Self::Inflated { + elements, + lpar, + rpar, + }) } } @@ -935,8 +1036,7 @@ impl<'a> Codegen<'a> for Tuple<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct GeneratorExp<'a> { pub elt: Box>, pub for_in: Box>, @@ -953,18 +1053,23 @@ impl<'a> Codegen<'a> for GeneratorExp<'a> { } } -impl<'a> Inflate<'a> for GeneratorExp<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.elt = self.elt.inflate(config)?; - self.for_in = self.for_in.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedGeneratorExp<'r, 'a> { + type Inflated = GeneratorExp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let elt = self.elt.inflate(config)?; + let for_in = self.for_in.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elt, + for_in, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct ListComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -985,20 +1090,28 @@ impl<'a> Codegen<'a> for ListComp<'a> { } } -impl<'a> Inflate<'a> for ListComp<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbracket = self.lbracket.inflate(config)?; - self.elt = self.elt.inflate(config)?; - self.for_in = self.for_in.inflate(config)?; - self.rbracket = self.rbracket.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedListComp<'r, 'a> { + type Inflated = ListComp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbracket = self.lbracket.inflate(config)?; + let elt = self.elt.inflate(config)?; + let for_in = self.for_in.inflate(config)?; + let rbracket = self.rbracket.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elt, + for_in, + lbracket, + rbracket, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] +#[derive(Default)] pub struct LeftSquareBracket<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1011,18 +1124,19 @@ impl<'a> Codegen<'a> for LeftSquareBracket<'a> { } } -impl<'a> Inflate<'a> for LeftSquareBracket<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedLeftSquareBracket<'r, 'a> { + type Inflated = LeftSquareBracket<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { whitespace_after }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] +#[derive(Default)] pub struct RightSquareBracket<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1035,18 +1149,18 @@ impl<'a> Codegen<'a> for RightSquareBracket<'a> { } } -impl<'a> Inflate<'a> for RightSquareBracket<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> 
Inflate<'a> for DeflatedRightSquareBracket<'r, 'a> { + type Inflated = RightSquareBracket<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { whitespace_before }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct SetComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -1056,15 +1170,23 @@ pub struct SetComp<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for SetComp<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbrace = self.lbrace.inflate(config)?; - self.elt = self.elt.inflate(config)?; - self.for_in = self.for_in.inflate(config)?; - self.rbrace = self.rbrace.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedSetComp<'r, 'a> { + type Inflated = SetComp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbrace = self.lbrace.inflate(config)?; + let elt = self.elt.inflate(config)?; + let for_in = self.for_in.inflate(config)?; + let rbrace = self.rbrace.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elt, + for_in, + lbrace, + rbrace, + lpar, + rpar, + }) } } @@ -1079,8 +1201,7 @@ impl<'a> Codegen<'a> for SetComp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct DictComp<'a> { pub key: Box>, pub value: Box>, @@ -1095,24 +1216,35 @@ pub struct DictComp<'a> { pub(crate) colon_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for DictComp<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbrace = self.lbrace.inflate(config)?; - self.key = self.key.inflate(config)?; - self.whitespace_before_colon = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedDictComp<'r, 'a> { + type Inflated = DictComp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbrace = self.lbrace.inflate(config)?; + let key = self.key.inflate(config)?; + let whitespace_before_colon = parse_parenthesizable_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after_colon = parse_parenthesizable_whitespace( + let whitespace_after_colon = parse_parenthesizable_whitespace( config, &mut (*self.colon_tok).whitespace_after.borrow_mut(), )?; - self.value = self.value.inflate(config)?; - self.for_in = self.for_in.inflate(config)?; - self.rbrace = self.rbrace.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let value = self.value.inflate(config)?; + let for_in = self.for_in.inflate(config)?; + let rbrace = self.rbrace.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + key, + value, + for_in, + lbrace, + rbrace, + lpar, + rpar, + whitespace_before_colon, + whitespace_after_colon, + }) } } @@ -1131,20 +1263,28 @@ impl<'a> Codegen<'a> for DictComp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct LeftCurlyBrace<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for LeftCurlyBrace<'a> { - fn inflate(mut self, config: 
&Config<'a>) -> Result { - self.whitespace_after = parse_parenthesizable_whitespace( +impl<'a> Default for LeftCurlyBrace<'a> { + fn default() -> Self { + Self { + whitespace_after: Default::default(), + } + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedLeftCurlyBrace<'r, 'a> { + type Inflated = LeftCurlyBrace<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { whitespace_after }) } } @@ -1155,20 +1295,28 @@ impl<'a> Codegen<'a> for LeftCurlyBrace<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct RightCurlyBrace<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for RightCurlyBrace<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Default for RightCurlyBrace<'a> { + fn default() -> Self { + Self { + whitespace_before: Default::default(), + } + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedRightCurlyBrace<'r, 'a> { + type Inflated = RightCurlyBrace<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { whitespace_before }) } } @@ -1179,8 +1327,7 @@ impl<'a> Codegen<'a> for RightCurlyBrace<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct CompFor<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1219,44 +1366,59 @@ impl<'a> Codegen<'a> for CompFor<'a> { } } -impl<'a> Inflate<'a> for CompFor<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedCompFor<'r, 'a> { + type Inflated = CompFor<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let mut whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.for_tok).whitespace_before.borrow_mut(), )?; - if let (Some(asy_tok), Some(asy)) = (self.async_tok.as_mut(), self.asynchronous.as_mut()) { + let asynchronous = if let Some(asy_tok) = self.async_tok.as_mut() { // If there is an async keyword, the start of the CompFor expression is // considered to be this keyword, so whitespace_before needs to adjust but // Asynchronous will own the whitespace before the for token. 
- asy.whitespace_after = parse_parenthesizable_whitespace( + let mut asy_whitespace_after = parse_parenthesizable_whitespace( config, &mut asy_tok.whitespace_before.borrow_mut(), )?; - swap(&mut asy.whitespace_after, &mut self.whitespace_before); - } - self.whitespace_after_for = parse_parenthesizable_whitespace( + swap(&mut asy_whitespace_after, &mut whitespace_before); + Some(Asynchronous { + whitespace_after: asy_whitespace_after, + }) + } else { + None + }; + let whitespace_after_for = parse_parenthesizable_whitespace( config, &mut (*self.for_tok).whitespace_after.borrow_mut(), )?; - self.target = self.target.inflate(config)?; - self.whitespace_before_in = parse_parenthesizable_whitespace( + let target = self.target.inflate(config)?; + let whitespace_before_in = parse_parenthesizable_whitespace( config, &mut (*self.in_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after_in = parse_parenthesizable_whitespace( + let whitespace_after_in = parse_parenthesizable_whitespace( config, &mut (*self.in_tok).whitespace_after.borrow_mut(), )?; - self.iter = self.iter.inflate(config)?; - self.ifs = self.ifs.inflate(config)?; - self.inner_for_in = self.inner_for_in.inflate(config)?; - Ok(self) + let iter = self.iter.inflate(config)?; + let ifs = self.ifs.inflate(config)?; + let inner_for_in = self.inner_for_in.inflate(config)?; + Ok(Self::Inflated { + target, + iter, + ifs, + inner_for_in, + asynchronous, + whitespace_before, + whitespace_after_for, + whitespace_before_in, + whitespace_after_in, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Asynchronous<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, } @@ -1268,8 +1430,13 @@ impl<'a> Codegen<'a> for Asynchronous<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +pub(crate) fn make_async<'r, 'a>() -> DeflatedAsynchronous<'r, 'a> { + DeflatedAsynchronous { + _phantom: Default::default(), + } +} + +#[cst_node] pub struct CompIf<'a> { pub test: Expression<'a>, pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -1287,23 +1454,27 @@ impl<'a> Codegen<'a> for CompIf<'a> { } } -impl<'a> Inflate<'a> for CompIf<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedCompIf<'r, 'a> { + type Inflated = CompIf<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_before_test = parse_parenthesizable_whitespace( + let whitespace_before_test = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_after.borrow_mut(), )?; - self.test = self.test.inflate(config)?; - Ok(self) + let test = self.test.inflate(config)?; + Ok(Self::Inflated { + test, + whitespace_before, + whitespace_before_test, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct List<'a> { pub elements: Vec>, pub lbracket: LeftSquareBracket<'a>, @@ -1312,23 +1483,32 @@ pub struct List<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for List<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbracket = self.lbracket.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedList<'r, 'a> { + type Inflated = List<'a>; + fn inflate(self, 
config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbracket = self.lbracket.inflate(config)?; let len = self.elements.len(); - self.elements = self + let elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) - .collect::>()?; - if !self.elements.is_empty() { + .collect::>>()?; + let rbracket = if !elements.is_empty() { // lbracket owns all the whitespace if there are no elements - self.rbracket = self.rbracket.inflate(config)?; - } - self.rpar = self.rpar.inflate(config)?; - Ok(self) + self.rbracket.inflate(config)? + } else { + Default::default() + }; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elements, + lbracket, + rbracket, + lpar, + rpar, + }) } } @@ -1345,8 +1525,7 @@ impl<'a> Codegen<'a> for List<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Set<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1355,22 +1534,31 @@ pub struct Set<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for Set<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbrace = self.lbrace.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedSet<'r, 'a> { + type Inflated = Set<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbrace = self.lbrace.inflate(config)?; let len = self.elements.len(); - self.elements = self + let elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) - .collect::>()?; - if !self.elements.is_empty() { - self.rbrace = self.rbrace.inflate(config)?; - } - self.rpar = self.rpar.inflate(config)?; - Ok(self) + .collect::>>()?; + let rbrace = if !elements.is_empty() { + self.rbrace.inflate(config)? + } else { + Default::default() + }; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elements, + lbrace, + rbrace, + lpar, + rpar, + }) } } @@ -1387,8 +1575,7 @@ impl<'a> Codegen<'a> for Set<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Dict<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1397,22 +1584,31 @@ pub struct Dict<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for Dict<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbrace = self.lbrace.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedDict<'r, 'a> { + type Inflated = Dict<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbrace = self.lbrace.inflate(config)?; let len = self.elements.len(); - self.elements = self + let elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) - .collect::>()?; - if !self.elements.is_empty() { - self.rbrace = self.rbrace.inflate(config)?; - } - self.rpar = self.rpar.inflate(config)?; - Ok(self) + .collect::>>()?; + let rbrace = if !elements.is_empty() { + self.rbrace.inflate(config)? 
+ } else { + Default::default() + }; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elements, + lbrace, + rbrace, + lpar, + rpar, + }) } } @@ -1429,7 +1625,7 @@ impl<'a> Codegen<'a> for Dict<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] +#[cst_node(NoIntoPy)] pub enum DictElement<'a> { Simple { key: Expression<'a>, @@ -1442,10 +1638,14 @@ pub enum DictElement<'a> { Starred(StarredDictElement<'a>), } -impl<'a> DictElement<'a> { - pub fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result { +impl<'r, 'a> DeflatedDictElement<'r, 'a> { + pub fn inflate_element( + self, + config: &Config<'a>, + last_element: bool, + ) -> Result> { Ok(match self { - Self::Starred(s) => Self::Starred(s.inflate_element(config, last_element)?), + Self::Starred(s) => DictElement::Starred(s.inflate_element(config, last_element)?), Self::Simple { key, value, @@ -1461,7 +1661,7 @@ impl<'a> DictElement<'a> { config, &mut colon_tok.whitespace_after.borrow_mut(), )?; - Self::Simple { + DictElement::Simple { key: key.inflate(config)?, whitespace_before_colon, whitespace_after_colon, @@ -1471,7 +1671,6 @@ impl<'a> DictElement<'a> { } else { comma.inflate(config) }?, - colon_tok, } } }) @@ -1515,32 +1714,27 @@ impl<'a> DictElement<'a> { } } -impl<'a> WithComma<'a> for DictElement<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedDictElement<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { let comma = Some(comma); match self { - Self::Starred(s) => Self::Starred(StarredDictElement { comma, ..s }), + Self::Starred(s) => Self::Starred(DeflatedStarredDictElement { comma, ..s }), Self::Simple { key, value, - whitespace_before_colon, - whitespace_after_colon, colon_tok, .. } => Self::Simple { comma, key, value, - whitespace_after_colon, - whitespace_before_colon, colon_tok, }, } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct StarredDictElement<'a> { pub value: Expression<'a>, pub comma: Option>, @@ -1549,19 +1743,27 @@ pub struct StarredDictElement<'a> { pub(crate) star_tok: TokenRef<'a>, } -impl<'a> StarredDictElement<'a> { - fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { - self.whitespace_before_value = parse_parenthesizable_whitespace( +impl<'r, 'a> DeflatedStarredDictElement<'r, 'a> { + fn inflate_element( + self, + config: &Config<'a>, + last_element: bool, + ) -> Result> { + let whitespace_before_value = parse_parenthesizable_whitespace( config, &mut (*self.star_tok).whitespace_after.borrow_mut(), )?; - self.value = self.value.inflate(config)?; - self.comma = if last_element { + let value = self.value.inflate(config)?; + let comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(self) + Ok(StarredDictElement { + value, + comma, + whitespace_before_value, + }) } } @@ -1576,23 +1778,22 @@ impl<'a> Codegen<'a> for StarredDictElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, Inflate)] pub enum BaseSlice<'a> { Index(Box>), Slice(Box>), } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Index<'a> { pub value: Expression<'a>, } -impl<'a> Inflate<'a> for Index<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.value = self.value.inflate(config)?; - Ok(self) +impl<'r, 'a> 
Inflate<'a> for DeflatedIndex<'r, 'a> { + type Inflated = Index<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let value = self.value.inflate(config)?; + Ok(Self::Inflated { value }) } } @@ -1602,8 +1803,7 @@ impl<'a> Codegen<'a> for Index<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Slice<'a> { #[cfg_attr(feature = "py", no_py_default)] pub lower: Option>, @@ -1614,14 +1814,21 @@ pub struct Slice<'a> { pub second_colon: Option>, } -impl<'a> Inflate<'a> for Slice<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lower = self.lower.inflate(config)?; - self.first_colon = self.first_colon.inflate(config)?; - self.upper = self.upper.inflate(config)?; - self.second_colon = self.second_colon.inflate(config)?; - self.step = self.step.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedSlice<'r, 'a> { + type Inflated = Slice<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lower = self.lower.inflate(config)?; + let first_colon = self.first_colon.inflate(config)?; + let upper = self.upper.inflate(config)?; + let second_colon = self.second_colon.inflate(config)?; + let step = self.step.inflate(config)?; + Ok(Self::Inflated { + lower, + upper, + step, + first_colon, + second_colon, + }) } } @@ -1645,18 +1852,18 @@ impl<'a> Codegen<'a> for Slice<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct SubscriptElement<'a> { pub slice: BaseSlice<'a>, pub comma: Option>, } -impl<'a> Inflate<'a> for SubscriptElement<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.slice = self.slice.inflate(config)?; - self.comma = self.comma.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedSubscriptElement<'r, 'a> { + type Inflated = SubscriptElement<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let slice = self.slice.inflate(config)?; + let comma = self.comma.inflate(config)?; + Ok(Self::Inflated { slice, comma }) } } @@ -1669,8 +1876,7 @@ impl<'a> Codegen<'a> for SubscriptElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Subscript<'a> { pub value: Box>, pub slice: Vec>, @@ -1679,23 +1885,30 @@ pub struct Subscript<'a> { pub lpar: Vec>, pub rpar: Vec>, pub whitespace_after_value: ParenthesizableWhitespace<'a>, - - pub(crate) lbracket_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Subscript<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.value = self.value.inflate(config)?; - self.whitespace_after_value = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedSubscript<'r, 'a> { + type Inflated = Subscript<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let value = self.value.inflate(config)?; + let whitespace_after_value = parse_parenthesizable_whitespace( config, - &mut (*self.lbracket_tok).whitespace_before.borrow_mut(), + &mut self.lbracket.tok.whitespace_before.borrow_mut(), )?; - self.lbracket = self.lbracket.inflate(config)?; - self.slice = self.slice.inflate(config)?; - self.rbracket = self.rbracket.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let lbracket = self.lbracket.inflate(config)?; + let slice = self.slice.inflate(config)?; + let rbracket = self.rbracket.inflate(config)?; + let rpar 
= self.rpar.inflate(config)?; + Ok(Self::Inflated { + value, + slice, + lbracket, + rbracket, + lpar, + rpar, + whitespace_after_value, + }) } } @@ -1717,8 +1930,7 @@ impl<'a> Codegen<'a> for Subscript<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct IfExp<'a> { pub test: Box>, pub body: Box>, @@ -1734,30 +1946,41 @@ pub struct IfExp<'a> { pub(crate) else_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for IfExp<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.body = self.body.inflate(config)?; - self.whitespace_before_if = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedIfExp<'r, 'a> { + type Inflated = IfExp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let body = self.body.inflate(config)?; + let whitespace_before_if = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after_if = parse_parenthesizable_whitespace( + let whitespace_after_if = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_after.borrow_mut(), )?; - self.test = self.test.inflate(config)?; - self.whitespace_before_else = parse_parenthesizable_whitespace( + let test = self.test.inflate(config)?; + let whitespace_before_else = parse_parenthesizable_whitespace( config, &mut (*self.else_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after_else = parse_parenthesizable_whitespace( + let whitespace_after_else = parse_parenthesizable_whitespace( config, &mut (*self.else_tok).whitespace_after.borrow_mut(), )?; - self.orelse = self.orelse.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let orelse = self.orelse.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + test, + body, + orelse, + lpar, + rpar, + whitespace_before_if, + whitespace_after_if, + whitespace_before_else, + whitespace_after_else, + }) } } @@ -1777,8 +2000,7 @@ impl<'a> Codegen<'a> for IfExp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Lambda<'a> { pub params: Box>, pub body: Box>, @@ -1790,21 +2012,31 @@ pub struct Lambda<'a> { pub(crate) lambda_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Lambda<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - if !self.params.is_empty() { - self.whitespace_after_lambda = Some(parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedLambda<'r, 'a> { + type Inflated = Lambda<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let whitespace_after_lambda = if !self.params.is_empty() { + Some(parse_parenthesizable_whitespace( config, &mut (*self.lambda_tok).whitespace_after.borrow_mut(), - )?); - } - self.params = self.params.inflate(config)?; - adjust_parameters_trailing_whitespace(config, &mut self.params, &self.colon.tok)?; - self.colon = self.colon.inflate(config)?; - self.body = self.body.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + )?) 
+ } else { + Default::default() + }; + let mut params = self.params.inflate(config)?; + adjust_parameters_trailing_whitespace(config, &mut params, &self.colon.tok)?; + let colon = self.colon.inflate(config)?; + let body = self.body.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + params, + body, + colon, + lpar, + rpar, + whitespace_after_lambda, + }) } } @@ -1825,8 +2057,7 @@ impl<'a> Codegen<'a> for Lambda<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct From<'a> { pub item: Expression<'a>, pub whitespace_before_from: Option>, @@ -1848,36 +2079,41 @@ impl<'a> From<'a> { } } -impl<'a> Inflate<'a> for From<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before_from = Some(parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedFrom<'r, 'a> { + type Inflated = From<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before_from = Some(parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?); - self.whitespace_after_from = parse_parenthesizable_whitespace( + let whitespace_after_from = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - self.item = self.item.inflate(config)?; - Ok(self) + let item = self.item.inflate(config)?; + Ok(Self::Inflated { + item, + whitespace_before_from, + whitespace_after_from, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum YieldValue<'a> { Expression(Box>), From(Box>), } -impl<'a> Inflate<'a> for YieldValue<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedYieldValue<'r, 'a> { + type Inflated = YieldValue<'a>; + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { - Self::Expression(e) => Self::Expression(e.inflate(config)?), + Self::Expression(e) => Self::Inflated::Expression(e.inflate(config)?), Self::From(e) => { let mut e = e.inflate(config)?; e.whitespace_before_from = None; - Self::From(e) + Self::Inflated::From(e) } }) } @@ -1892,8 +2128,7 @@ impl<'a> YieldValue<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Yield<'a> { pub value: Option>>, pub lpar: Vec>, @@ -1903,18 +2138,26 @@ pub struct Yield<'a> { pub(crate) yield_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Yield<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - if self.value.is_some() { - self.whitespace_after_yield = Some(parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedYield<'r, 'a> { + type Inflated = Yield<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let whitespace_after_yield = if self.value.is_some() { + Some(parse_parenthesizable_whitespace( config, &mut (*self.yield_tok).whitespace_after.borrow_mut(), - )?); - } - self.value = self.value.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + )?) 
+ } else { + Default::default() + }; + let value = self.value.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value, + lpar, + rpar, + whitespace_after_yield, + }) } } @@ -1935,8 +2178,7 @@ impl<'a> Codegen<'a> for Yield<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct Await<'a> { pub expression: Box>, pub lpar: Vec>, @@ -1946,16 +2188,22 @@ pub struct Await<'a> { pub(crate) await_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Await<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.whitespace_after_await = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedAwait<'r, 'a> { + type Inflated = Await<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let whitespace_after_await = parse_parenthesizable_whitespace( config, &mut (*self.await_tok).whitespace_after.borrow_mut(), )?; - self.expression = self.expression.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let expression = self.expression.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + expression, + lpar, + rpar, + whitespace_after_await, + }) } } @@ -1969,26 +2217,24 @@ impl<'a> Codegen<'a> for Await<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, Inflate)] pub enum String<'a> { Simple(SimpleString<'a>), Concatenated(ConcatenatedString<'a>), Formatted(FormattedString<'a>), } -impl<'a> std::convert::From> for Expression<'a> { - fn from(s: String<'a>) -> Self { +impl<'r, 'a> std::convert::From> for DeflatedExpression<'r, 'a> { + fn from(s: DeflatedString<'r, 'a>) -> Self { match s { - String::Simple(s) => Self::SimpleString(Box::new(s)), - String::Concatenated(s) => Self::ConcatenatedString(Box::new(s)), - String::Formatted(s) => Self::FormattedString(Box::new(s)), + DeflatedString::Simple(s) => Self::SimpleString(Box::new(s)), + DeflatedString::Concatenated(s) => Self::ConcatenatedString(Box::new(s)), + DeflatedString::Formatted(s) => Self::FormattedString(Box::new(s)), } } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct ConcatenatedString<'a> { pub left: Box>, pub right: Box>, @@ -2001,17 +2247,24 @@ pub struct ConcatenatedString<'a> { pub(crate) right_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for ConcatenatedString<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.left = self.left.inflate(config)?; - self.whitespace_between = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedConcatenatedString<'r, 'a> { + type Inflated = ConcatenatedString<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let left = self.left.inflate(config)?; + let whitespace_between = parse_parenthesizable_whitespace( config, &mut (*self.right_tok).whitespace_before.borrow_mut(), )?; - self.right = self.right.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let right = self.right.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + left, + right, + lpar, + rpar, + whitespace_between, + }) } } @@ -2025,8 +2278,7 @@ impl<'a> Codegen<'a> for ConcatenatedString<'a> { } } 
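
The hunks above repeat one mechanical change: `fn inflate(mut self, ...) -> Result<Self>` becomes an `Inflate` implementation for the corresponding `Deflated*` node with an associated `Inflated` output type, so the borrowed parse-time node is consumed and an owned, whitespace-bearing node is built field by field. A minimal sketch of that shape, using stand-in names (`Config`, `Token`, `DeflatedPass`, `Pass`) rather than libcst's real definitions, might look like this:

```
// Illustrative sketch only; these are not libcst's actual types.
type Result<T> = std::result::Result<T, String>;

struct Token<'a> {
    whitespace_before: &'a str,
}

struct Config<'a> {
    default_newline: &'a str,
}

// The refactored trait: inflating consumes `self` and produces a different,
// owned node type instead of mutating `self` in place.
trait Inflate<'a>: Sized {
    type Inflated;
    fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated>;
}

// Deflated node: built by the parser, borrows its token, no whitespace yet.
struct DeflatedPass<'r, 'a> {
    tok: &'r Token<'a>,
}

// Inflated node: owns the whitespace recovered from the token stream.
struct Pass<'a> {
    whitespace_before: &'a str,
}

impl<'r, 'a> Inflate<'a> for DeflatedPass<'r, 'a> {
    type Inflated = Pass<'a>;
    fn inflate(self, _config: &Config<'a>) -> Result<Self::Inflated> {
        // libcst would call parse_parenthesizable_whitespace(config, ...) here;
        // the sketch forwards the raw slice to stay self-contained.
        Ok(Pass {
            whitespace_before: self.tok.whitespace_before,
        })
    }
}

fn main() -> Result<()> {
    let tok = Token { whitespace_before: "    " };
    let config = Config { default_newline: "\n" };
    let inflated = DeflatedPass { tok: &tok }.inflate(&config)?;
    assert_eq!(inflated.whitespace_before, "    ");
    Ok(())
}
```
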
-#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode, Default)] pub struct SimpleString<'a> { /// The texual representation of the string, including quotes, prefix /// characters, and any escape characters present in the original source code, @@ -2036,11 +2288,16 @@ pub struct SimpleString<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for SimpleString<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedSimpleString<'r, 'a> { + type Inflated = SimpleString<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + value: self.value, + lpar, + rpar, + }) } } @@ -2050,15 +2307,15 @@ impl<'a> Codegen<'a> for SimpleString<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct FormattedStringText<'a> { pub value: &'a str, } -impl<'a> Inflate<'a> for FormattedStringText<'a> { - fn inflate(self, _config: &Config<'a>) -> Result { - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedFormattedStringText<'r, 'a> { + type Inflated = FormattedStringText<'a>; + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(Self::Inflated { value: self.value }) } } @@ -2068,8 +2325,14 @@ impl<'a> Codegen<'a> for FormattedStringText<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +pub(crate) fn make_fstringtext<'r, 'a>(value: &'a str) -> DeflatedFormattedStringText<'r, 'a> { + DeflatedFormattedStringText { + value, + _phantom: Default::default(), + } +} + +#[cst_node] pub struct FormattedStringExpression<'a> { pub expression: Expression<'a>, pub conversion: Option<&'a str>, @@ -2084,22 +2347,33 @@ pub struct FormattedStringExpression<'a> { pub(crate) after_expr_tok: Option>, } -impl<'a> Inflate<'a> for FormattedStringExpression<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before_expression = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedFormattedStringExpression<'r, 'a> { + type Inflated = FormattedStringExpression<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let whitespace_before_expression = parse_parenthesizable_whitespace( config, &mut (*self.lbrace_tok).whitespace_after.borrow_mut(), )?; - self.expression = self.expression.inflate(config)?; - self.equal = self.equal.inflate(config)?; - if let Some(after_expr_tok) = self.after_expr_tok.as_mut() { - self.whitespace_after_expression = parse_parenthesizable_whitespace( + let expression = self.expression.inflate(config)?; + let equal = self.equal.inflate(config)?; + let whitespace_after_expression = if let Some(after_expr_tok) = self.after_expr_tok.as_mut() + { + parse_parenthesizable_whitespace( config, &mut after_expr_tok.whitespace_before.borrow_mut(), - )?; - } - self.format_spec = self.format_spec.inflate(config)?; - Ok(self) + )? 
+ } else { + Default::default() + }; + let format_spec = self.format_spec.inflate(config)?; + Ok(Self::Inflated { + expression, + conversion: self.conversion, + format_spec, + whitespace_before_expression, + whitespace_after_expression, + equal, + }) } } @@ -2126,15 +2400,13 @@ impl<'a> Codegen<'a> for FormattedStringExpression<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, Inflate)] pub enum FormattedStringContent<'a> { Text(FormattedStringText<'a>), Expression(Box>), } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct FormattedString<'a> { pub parts: Vec>, pub start: &'a str, @@ -2143,12 +2415,19 @@ pub struct FormattedString<'a> { pub rpar: Vec>, } -impl<'a> Inflate<'a> for FormattedString<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.parts = self.parts.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedFormattedString<'r, 'a> { + type Inflated = FormattedString<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let parts = self.parts.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + parts, + start: self.start, + end: self.end, + lpar, + rpar, + }) } } @@ -2164,8 +2443,7 @@ impl<'a> Codegen<'a> for FormattedString<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct NamedExpr<'a> { pub target: Box>, pub value: Box>, @@ -2190,21 +2468,29 @@ impl<'a> Codegen<'a> for NamedExpr<'a> { } } -impl<'a> Inflate<'a> for NamedExpr<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.target = self.target.inflate(config)?; - self.whitespace_before_walrus = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedNamedExpr<'r, 'a> { + type Inflated = NamedExpr<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let target = self.target.inflate(config)?; + let whitespace_before_walrus = parse_parenthesizable_whitespace( config, &mut self.walrus_tok.whitespace_before.borrow_mut(), )?; - self.whitespace_after_walrus = parse_parenthesizable_whitespace( + let whitespace_after_walrus = parse_parenthesizable_whitespace( config, &mut self.walrus_tok.whitespace_after.borrow_mut(), )?; - self.value = self.value.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let value = self.value.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + target, + value, + lpar, + rpar, + whitespace_before_walrus, + whitespace_after_walrus, + }) } } diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index 58fa42d1..d6d8152d 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -3,12 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree -mod whitespace; +pub(crate) mod whitespace; pub use whitespace::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, }; -mod statement; +pub(crate) mod statement; pub use statement::{ AnnAssign, Annotation, AsName, Assert, 
Assign, AssignTarget, AssignTargetExpression, AugAssign, Break, ClassDef, CompoundStatement, Continue, Decorator, Del, DelTargetExpression, Else, @@ -21,7 +21,7 @@ pub use statement::{ TryStar, While, With, WithItem, }; -mod expression; +pub(crate) mod expression; pub use expression::{ Arg, Asynchronous, Attribute, Await, BaseSlice, BinaryOperation, BooleanOperation, Call, CompFor, CompIf, Comparison, ComparisonTarget, ConcatenatedString, Dict, DictComp, DictElement, @@ -33,13 +33,13 @@ pub use expression::{ StarredElement, String, Subscript, SubscriptElement, Tuple, UnaryOperation, Yield, YieldValue, }; -mod op; +pub(crate) mod op; pub use op::{ AssignEqual, AugOp, BinaryOp, BitOr, BooleanOp, Colon, Comma, CompOp, Dot, ImportStar, Semicolon, UnaryOp, }; -mod module; +pub(crate) mod module; pub use module::Module; mod codegen; @@ -49,3 +49,76 @@ pub(crate) mod traits; pub use traits::{Inflate, ParenthesizedNode, WithComma, WithLeadingLines}; pub(crate) mod inflate_helpers; + +pub(crate) mod deflated { + pub use super::expression::{ + DeflatedArg as Arg, DeflatedAsynchronous as Asynchronous, DeflatedAttribute as Attribute, + DeflatedAwait as Await, DeflatedBaseSlice as BaseSlice, + DeflatedBinaryOperation as BinaryOperation, DeflatedBooleanOperation as BooleanOperation, + DeflatedCall as Call, DeflatedCompFor as CompFor, DeflatedCompIf as CompIf, + DeflatedComparison as Comparison, DeflatedComparisonTarget as ComparisonTarget, + DeflatedConcatenatedString as ConcatenatedString, DeflatedDict as Dict, + DeflatedDictComp as DictComp, DeflatedDictElement as DictElement, + DeflatedElement as Element, DeflatedEllipsis as Ellipsis, DeflatedExpression as Expression, + DeflatedFloat as Float, DeflatedFormattedString as FormattedString, + DeflatedFormattedStringContent as FormattedStringContent, + DeflatedFormattedStringExpression as FormattedStringExpression, + DeflatedFormattedStringText as FormattedStringText, DeflatedFrom as From, + DeflatedGeneratorExp as GeneratorExp, DeflatedIfExp as IfExp, + DeflatedImaginary as Imaginary, DeflatedIndex as Index, DeflatedInteger as Integer, + DeflatedLambda as Lambda, DeflatedLeftCurlyBrace as LeftCurlyBrace, + DeflatedLeftParen as LeftParen, DeflatedLeftSquareBracket as LeftSquareBracket, + DeflatedList as List, DeflatedListComp as ListComp, DeflatedName as Name, + DeflatedNameOrAttribute as NameOrAttribute, DeflatedNamedExpr as NamedExpr, + DeflatedParam as Param, DeflatedParamSlash as ParamSlash, DeflatedParamStar as ParamStar, + DeflatedParameters as Parameters, DeflatedRightCurlyBrace as RightCurlyBrace, + DeflatedRightParen as RightParen, DeflatedRightSquareBracket as RightSquareBracket, + DeflatedSet as Set, DeflatedSetComp as SetComp, DeflatedSimpleString as SimpleString, + DeflatedSlice as Slice, DeflatedStarArg as StarArg, + DeflatedStarredDictElement as StarredDictElement, DeflatedStarredElement as StarredElement, + DeflatedString as String, DeflatedSubscript as Subscript, + DeflatedSubscriptElement as SubscriptElement, DeflatedTuple as Tuple, + DeflatedUnaryOperation as UnaryOperation, DeflatedYield as Yield, + DeflatedYieldValue as YieldValue, + }; + pub use super::module::DeflatedModule as Module; + pub use super::op::{ + DeflatedAssignEqual as AssignEqual, DeflatedAugOp as AugOp, DeflatedBinaryOp as BinaryOp, + DeflatedBitOr as BitOr, DeflatedBooleanOp as BooleanOp, DeflatedColon as Colon, + DeflatedComma as Comma, DeflatedCompOp as CompOp, DeflatedDot as Dot, + DeflatedImportStar as ImportStar, DeflatedSemicolon as Semicolon, + DeflatedUnaryOp 
as UnaryOp, + }; + pub use super::statement::{ + DeflatedAnnAssign as AnnAssign, DeflatedAnnotation as Annotation, DeflatedAsName as AsName, + DeflatedAssert as Assert, DeflatedAssign as Assign, DeflatedAssignTarget as AssignTarget, + DeflatedAssignTargetExpression as AssignTargetExpression, DeflatedAugAssign as AugAssign, + DeflatedBreak as Break, DeflatedClassDef as ClassDef, + DeflatedCompoundStatement as CompoundStatement, DeflatedContinue as Continue, + DeflatedDecorator as Decorator, DeflatedDel as Del, + DeflatedDelTargetExpression as DelTargetExpression, DeflatedElse as Else, + DeflatedExceptHandler as ExceptHandler, DeflatedExceptStarHandler as ExceptStarHandler, + DeflatedExpr as Expr, DeflatedFinally as Finally, DeflatedFor as For, + DeflatedFunctionDef as FunctionDef, DeflatedGlobal as Global, DeflatedIf as If, + DeflatedImport as Import, DeflatedImportAlias as ImportAlias, + DeflatedImportFrom as ImportFrom, DeflatedImportNames as ImportNames, + DeflatedIndentedBlock as IndentedBlock, DeflatedMatch as Match, DeflatedMatchAs as MatchAs, + DeflatedMatchCase as MatchCase, DeflatedMatchClass as MatchClass, + DeflatedMatchKeywordElement as MatchKeywordElement, DeflatedMatchList as MatchList, + DeflatedMatchMapping as MatchMapping, DeflatedMatchMappingElement as MatchMappingElement, + DeflatedMatchOr as MatchOr, DeflatedMatchOrElement as MatchOrElement, + DeflatedMatchPattern as MatchPattern, DeflatedMatchSequence as MatchSequence, + DeflatedMatchSequenceElement as MatchSequenceElement, + DeflatedMatchSingleton as MatchSingleton, DeflatedMatchStar as MatchStar, + DeflatedMatchTuple as MatchTuple, DeflatedMatchValue as MatchValue, + DeflatedNameItem as NameItem, DeflatedNonlocal as Nonlocal, DeflatedOrElse as OrElse, + DeflatedPass as Pass, DeflatedRaise as Raise, DeflatedReturn as Return, + DeflatedSimpleStatementLine as SimpleStatementLine, + DeflatedSimpleStatementSuite as SimpleStatementSuite, + DeflatedSmallStatement as SmallStatement, + DeflatedStarrableMatchSequenceElement as StarrableMatchSequenceElement, + DeflatedStatement as Statement, DeflatedSuite as Suite, DeflatedTry as Try, + DeflatedTryStar as TryStar, DeflatedWhile as While, DeflatedWith as With, + DeflatedWithItem as WithItem, + }; +} diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index 7bc42385..656b7d39 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -4,27 +4,26 @@ // LICENSE file in the root directory of this source tree. 
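
The `deflated` module added above is plain re-export aliasing: each generated `Deflated*` type is exposed under its short name inside `nodes::deflated`, so parser code can select the deflated variant by path while the inflated types keep their unprefixed names. A self-contained sketch of the same trick, with made-up types rather than libcst's, could look like:

```
// Illustrative sketch of the aliasing pattern; stand-in types only.
mod nodes {
    pub struct Name;                            // inflated node, owns whitespace
    pub struct DeflatedName<'r>(pub &'r str);   // parse-time node, borrows tokens

    pub mod deflated {
        // Inside `nodes::deflated`, `Name` means the deflated variant.
        pub use super::DeflatedName as Name;
    }
}

// Call sites can then spell both variants naturally by path:
fn parse(src: &str) -> nodes::deflated::Name<'_> {
    nodes::deflated::Name(src)
}

fn inflate(d: nodes::deflated::Name<'_>) -> nodes::Name {
    let _raw = d.0; // a real inflate would parse whitespace out of the tokens
    nodes::Name
}

fn main() {
    let deflated = parse("x");
    let _inflated = inflate(deflated);
}
```
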
use std::mem::swap; -use std::rc::Rc; use crate::tokenizer::whitespace_parser::parse_empty_lines; use crate::tokenizer::Token; use crate::{ nodes::{ codegen::{Codegen, CodegenState}, - statement::Statement, + statement::*, whitespace::EmptyLine, }, tokenizer::whitespace_parser::Config, }; +use libcst_derive::cst_node; #[cfg(feature = "py")] use libcst_derive::TryIntoPy; use super::traits::{Inflate, Result, WithLeadingLines}; -type TokenRef<'a> = Rc>; +type TokenRef<'r, 'a> = &'r Token<'a>; -#[derive(Debug, Eq, PartialEq)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Module<'a> { pub body: Vec>, pub header: Vec>, @@ -52,19 +51,20 @@ impl<'a> Codegen<'a> for Module<'a> { } } -impl<'a> Inflate<'a> for Module<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.default_indent = config.default_indent; - self.default_newline = config.default_newline; - self.has_trailing_newline = config.has_trailing_newline(); - self.body = self.body.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedModule<'r, 'a> { + type Inflated = Module<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let default_indent = config.default_indent; + let default_newline = config.default_newline; + let has_trailing_newline = config.has_trailing_newline(); + let mut body = self.body.inflate(config)?; let mut footer = parse_empty_lines( config, &mut (*self.eof_tok).whitespace_before.borrow_mut(), Some(""), )?; let mut header = vec![]; - if let Some(stmt) = self.body.first_mut() { + if let Some(stmt) = body.first_mut() { swap(stmt.leading_lines(), &mut header); let mut last_indented = None; for (num, line) in footer.iter().enumerate() { @@ -87,8 +87,14 @@ impl<'a> Inflate<'a> for Module<'a> { } else { swap(&mut header, &mut footer); } - self.footer = footer; - self.header = header; - Ok(self) + Ok(Self::Inflated { + body, + header, + footer, + default_indent, + default_newline, + has_trailing_newline, + encoding: self.encoding, + }) } } diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index d857e9a9..3e02483e 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -3,8 +3,6 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. -use std::rc::Rc; - use super::{whitespace::ParenthesizableWhitespace, Codegen, CodegenState}; use crate::{ nodes::traits::{Inflate, Result}, @@ -13,13 +11,13 @@ use crate::{ Token, }, }; +use libcst_derive::cst_node; #[cfg(feature = "py")] use libcst_derive::TryIntoPy; -type TokenRef<'a> = Rc>; +type TokenRef<'r, 'a> = &'r Token<'a>; -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Semicolon<'a> { /// Any space that appears directly before this semicolon. 
pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -38,20 +36,23 @@ impl<'a> Codegen<'a> for Semicolon<'a> { } } -impl<'a> Inflate<'a> for Semicolon<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = ParenthesizableWhitespace::SimpleWhitespace( +impl<'r, 'a> Inflate<'a> for DeflatedSemicolon<'r, 'a> { + type Inflated = Semicolon<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = ParenthesizableWhitespace::SimpleWhitespace( parse_simple_whitespace(config, &mut (*self.tok).whitespace_before.borrow_mut())?, ); - self.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace( + let whitespace_after = ParenthesizableWhitespace::SimpleWhitespace( parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?, ); - Ok(self) + Ok(Self::Inflated { + whitespace_before, + whitespace_after, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Comma<'a> { /// Any space that appears directly before this comma. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -70,32 +71,39 @@ impl<'a> Codegen<'a> for Comma<'a> { } } -impl<'a> Inflate<'a> for Comma<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedComma<'r, 'a> { + type Inflated = Comma<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after = parse_parenthesizable_whitespace( + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + whitespace_before, + whitespace_after, + }) } } -impl<'a> Comma<'a> { - pub fn inflate_before(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> DeflatedComma<'r, 'a> { + pub fn inflate_before(self, config: &Config<'a>) -> Result> { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - Ok(self) + let whitespace_after = Default::default(); + Ok(Comma { + whitespace_before, + whitespace_after, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct AssignEqual<'a> { /// Any space that appears directly before this equal sign. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -114,22 +122,25 @@ impl<'a> Codegen<'a> for AssignEqual<'a> { } } -impl<'a> Inflate<'a> for AssignEqual<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedAssignEqual<'r, 'a> { + type Inflated = AssignEqual<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after = parse_parenthesizable_whitespace( + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + whitespace_before, + whitespace_after, + }) } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Dot<'a> { /// Any space that appears directly before this dot. 
pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -148,50 +159,51 @@ impl<'a> Codegen<'a> for Dot<'a> { } } -impl<'a> Inflate<'a> for Dot<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.inflate_before(config)?; - self.inflate_after(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedDot<'r, 'a> { + type Inflated = Dot<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let whitespace_before = self.inflate_before(config)?; + let whitespace_after = self.inflate_after(config)?; + Ok(Self::Inflated { + whitespace_before, + whitespace_after, + }) } } -impl<'a> Dot<'a> { - fn inflate_before(&mut self, config: &Config<'a>) -> Result<()> { - self.whitespace_before = parse_parenthesizable_whitespace( - config, - &mut (*self.tok).whitespace_before.borrow_mut(), - )?; - Ok(()) +impl<'r, 'a> DeflatedDot<'r, 'a> { + fn inflate_before(&mut self, config: &Config<'a>) -> Result> { + parse_parenthesizable_whitespace(config, &mut (*self.tok).whitespace_before.borrow_mut()) } - fn inflate_after(&mut self, config: &Config<'a>) -> Result<()> { - self.whitespace_after = parse_parenthesizable_whitespace( - config, - &mut (*self.tok).whitespace_after.borrow_mut(), - )?; - Ok(()) + fn inflate_after(&mut self, config: &Config<'a>) -> Result> { + parse_parenthesizable_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut()) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ImportStar {} +pub(crate) fn make_importstar<'r, 'a>() -> DeflatedImportStar<'r, 'a> { + DeflatedImportStar { + _phantom: Default::default(), + } +} + impl<'a> Codegen<'a> for ImportStar { fn codegen(&self, state: &mut CodegenState<'a>) { state.add_token("*"); } } -impl<'a> Inflate<'a> for ImportStar { - fn inflate(self, _config: &Config<'a>) -> Result { - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedImportStar<'r, 'a> { + type Inflated = ImportStar; + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(ImportStar {}) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum UnaryOp<'a> { Plus { whitespace_after: ParenthesizableWhitespace<'a>, @@ -236,55 +248,43 @@ impl<'a> Codegen<'a> for UnaryOp<'a> { } } -impl<'a> Inflate<'a> for UnaryOp<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedUnaryOp<'r, 'a> { + type Inflated = UnaryOp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::Plus { tok, .. } => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Plus { - whitespace_after, - tok, - } + Self::Inflated::Plus { whitespace_after } } Self::Minus { tok, .. } => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Minus { - whitespace_after, - tok, - } + Self::Inflated::Minus { whitespace_after } } Self::BitInvert { tok, .. } => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitInvert { - whitespace_after, - tok, - } + Self::Inflated::BitInvert { whitespace_after } } Self::Not { tok, .. 
} => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Not { - whitespace_after, - tok, - } + Self::Inflated::Not { whitespace_after } } }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum BooleanOp<'a> { And { whitespace_before: ParenthesizableWhitespace<'a>, @@ -320,8 +320,9 @@ impl<'a> Codegen<'a> for BooleanOp<'a> { } } -impl<'a> Inflate<'a> for BooleanOp<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedBooleanOp<'r, 'a> { + type Inflated = BooleanOp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::And { tok, .. } => { let whitespace_before = parse_parenthesizable_whitespace( @@ -332,10 +333,9 @@ impl<'a> Inflate<'a> for BooleanOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::And { + Self::Inflated::And { whitespace_before, whitespace_after, - tok, } } Self::Or { tok, .. } => { @@ -347,18 +347,16 @@ impl<'a> Inflate<'a> for BooleanOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Or { + Self::Inflated::Or { whitespace_before, whitespace_after, - tok, } } }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum BinaryOp<'a> { Add { whitespace_before: ParenthesizableWhitespace<'a>, @@ -442,81 +440,84 @@ pub enum BinaryOp<'a> { impl<'a> Codegen<'a> for BinaryOp<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { - let (whitespace_before, whitespace_after, tok) = match self { + let (whitespace_before, whitespace_after) = match self { Self::Add { whitespace_before, whitespace_after, - tok, } | Self::Subtract { whitespace_before, whitespace_after, - tok, } | Self::Multiply { whitespace_before, whitespace_after, - tok, } | Self::Divide { whitespace_before, whitespace_after, - tok, } | Self::FloorDivide { whitespace_before, whitespace_after, - tok, } | Self::Modulo { whitespace_before, whitespace_after, - tok, } | Self::Power { whitespace_before, whitespace_after, - tok, } | Self::LeftShift { whitespace_before, whitespace_after, - tok, } | Self::RightShift { whitespace_before, whitespace_after, - tok, } | Self::BitOr { whitespace_before, whitespace_after, - tok, } | Self::BitAnd { whitespace_before, whitespace_after, - tok, } | Self::BitXor { whitespace_before, whitespace_after, - tok, } | Self::MatrixMultiply { whitespace_before, whitespace_after, - tok, - } => (whitespace_before, whitespace_after, tok), + } => (whitespace_before, whitespace_after), + }; + let tok = match self { + BinaryOp::Add { .. } => "+", + BinaryOp::Subtract { .. } => "-", + BinaryOp::Multiply { .. } => "*", + BinaryOp::Divide { .. } => "/", + BinaryOp::FloorDivide { .. } => "//", + BinaryOp::Modulo { .. } => "%", + BinaryOp::Power { .. } => "**", + BinaryOp::LeftShift { .. } => "<<", + BinaryOp::RightShift { .. } => ">>", + BinaryOp::BitOr { .. } => "|", + BinaryOp::BitAnd { .. } => "&", + BinaryOp::BitXor { .. } => "^", + BinaryOp::MatrixMultiply { .. } => "@", }; whitespace_before.codegen(state); - state.add_token(tok.string); + state.add_token(tok); whitespace_after.codegen(state); } } -impl<'a> Inflate<'a> for BinaryOp<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { + type Inflated = BinaryOp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::Add { tok, .. 
} => { let whitespace_before = parse_parenthesizable_whitespace( @@ -527,10 +528,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Add { + Self::Inflated::Add { whitespace_before, whitespace_after, - tok, } } Self::Subtract { tok, .. } => { @@ -542,10 +542,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Subtract { + Self::Inflated::Subtract { whitespace_before, whitespace_after, - tok, } } Self::Multiply { tok, .. } => { @@ -557,10 +556,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Multiply { + Self::Inflated::Multiply { whitespace_before, whitespace_after, - tok, } } Self::Divide { tok, .. } => { @@ -572,10 +570,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Divide { + Self::Inflated::Divide { whitespace_before, whitespace_after, - tok, } } Self::FloorDivide { tok, .. } => { @@ -587,10 +584,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::FloorDivide { + Self::Inflated::FloorDivide { whitespace_before, whitespace_after, - tok, } } Self::Modulo { tok, .. } => { @@ -602,10 +598,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Modulo { + Self::Inflated::Modulo { whitespace_before, whitespace_after, - tok, } } Self::Power { tok, .. } => { @@ -617,10 +612,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Power { + Self::Inflated::Power { whitespace_before, whitespace_after, - tok, } } Self::LeftShift { tok, .. } => { @@ -632,10 +626,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::LeftShift { + Self::Inflated::LeftShift { whitespace_before, whitespace_after, - tok, } } Self::RightShift { tok, .. } => { @@ -647,10 +640,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::RightShift { + Self::Inflated::RightShift { whitespace_before, whitespace_after, - tok, } } Self::BitOr { tok, .. } => { @@ -662,10 +654,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitOr { + Self::Inflated::BitOr { whitespace_before, whitespace_after, - tok, } } Self::BitAnd { tok, .. } => { @@ -677,10 +668,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitAnd { + Self::Inflated::BitAnd { whitespace_before, whitespace_after, - tok, } } Self::BitXor { tok, .. } => { @@ -692,10 +682,9 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitXor { + Self::Inflated::BitXor { whitespace_before, whitespace_after, - tok, } } Self::MatrixMultiply { tok, .. 
} => { @@ -707,18 +696,16 @@ impl<'a> Inflate<'a> for BinaryOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::MatrixMultiply { + Self::Inflated::MatrixMultiply { whitespace_before, whitespace_after, - tok, } } }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum CompOp<'a> { LessThan { whitespace_before: ParenthesizableWhitespace<'a>, @@ -790,84 +777,83 @@ pub enum CompOp<'a> { impl<'a> Codegen<'a> for CompOp<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { - let (bef, aft, first_tok, between) = match self { + let (bef, aft, between) = match self { Self::LessThan { whitespace_before, whitespace_after, - tok, } | Self::GreaterThan { whitespace_before, whitespace_after, - tok, } | Self::LessThanEqual { whitespace_before, whitespace_after, - tok, } | Self::GreaterThanEqual { whitespace_before, whitespace_after, - tok, } | Self::Equal { whitespace_before, whitespace_after, - tok, } | Self::NotEqual { whitespace_before, whitespace_after, - tok, } | Self::In { whitespace_before, whitespace_after, - tok, } | Self::Is { whitespace_before, whitespace_after, - tok, - } => (whitespace_before, whitespace_after, tok, None), + } => (whitespace_before, whitespace_after, None), Self::IsNot { whitespace_before, whitespace_between, whitespace_after, - is_tok, - not_tok, } => ( whitespace_before, whitespace_after, - is_tok, - Some((whitespace_between, not_tok)), + Some(whitespace_between), ), Self::NotIn { whitespace_before, whitespace_between, whitespace_after, - not_tok, - in_tok, } => ( whitespace_before, whitespace_after, - not_tok, - Some((whitespace_between, in_tok)), + Some(whitespace_between), ), }; + let (first_tok, second_tok) = match self { + CompOp::LessThan { .. } => ("<", None), + CompOp::GreaterThan { .. } => (">", None), + CompOp::LessThanEqual { .. } => ("<=", None), + CompOp::GreaterThanEqual { .. } => (">=", None), + CompOp::Equal { .. } => ("==", None), + CompOp::NotEqual { .. } => ("!=", None), + CompOp::In { .. } => ("in", None), + CompOp::NotIn { .. } => ("not", Some("in")), + CompOp::Is { .. } => ("is", None), + CompOp::IsNot { .. } => ("is", Some("not")), + }; bef.codegen(state); - state.add_token(first_tok.string); - if let Some((btw, second_tok)) = between { + state.add_token(first_tok); + if let (Some(btw), Some(second_tok)) = (between, second_tok) { btw.codegen(state); - state.add_token(second_tok.string); + state.add_token(second_tok); } aft.codegen(state); } } -impl<'a> Inflate<'a> for CompOp<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { + type Inflated = CompOp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::LessThan { tok, .. } => { let whitespace_before = parse_parenthesizable_whitespace( @@ -878,10 +864,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::LessThan { + Self::Inflated::LessThan { whitespace_before, whitespace_after, - tok, } } Self::GreaterThan { tok, .. } => { @@ -893,10 +878,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::GreaterThan { + Self::Inflated::GreaterThan { whitespace_before, whitespace_after, - tok, } } Self::LessThanEqual { tok, .. 
} => { @@ -908,10 +892,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::LessThanEqual { + Self::Inflated::LessThanEqual { whitespace_before, whitespace_after, - tok, } } Self::GreaterThanEqual { tok, .. } => { @@ -923,10 +906,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::GreaterThanEqual { + Self::Inflated::GreaterThanEqual { whitespace_before, whitespace_after, - tok, } } Self::Equal { tok, .. } => { @@ -938,10 +920,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Equal { + Self::Inflated::Equal { whitespace_before, whitespace_after, - tok, } } Self::NotEqual { tok, .. } => { @@ -953,10 +934,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::NotEqual { + Self::Inflated::NotEqual { whitespace_before, whitespace_after, - tok, } } Self::In { tok, .. } => { @@ -968,10 +948,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::In { + Self::Inflated::In { whitespace_before, whitespace_after, - tok, } } Self::Is { tok, .. } => { @@ -983,10 +962,9 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Is { + Self::Inflated::Is { whitespace_before, whitespace_after, - tok, } } Self::IsNot { @@ -1004,12 +982,10 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*not_tok).whitespace_after.borrow_mut(), )?; - Self::IsNot { + Self::Inflated::IsNot { whitespace_before, whitespace_between, whitespace_after, - is_tok, - not_tok, } } Self::NotIn { @@ -1027,20 +1003,17 @@ impl<'a> Inflate<'a> for CompOp<'a> { config, &mut (*in_tok).whitespace_after.borrow_mut(), )?; - Self::NotIn { + Self::Inflated::NotIn { whitespace_before, whitespace_between, whitespace_after, - not_tok, - in_tok, } } }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Colon<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -1049,17 +1022,21 @@ pub struct Colon<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Colon<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedColon<'r, 'a> { + type Inflated = Colon<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after = parse_parenthesizable_whitespace( + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + whitespace_before, + whitespace_after, + }) } } @@ -1071,8 +1048,7 @@ impl<'a> Codegen<'a> for Colon<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum AugOp<'a> { AddAssign { whitespace_before: ParenthesizableWhitespace<'a>, @@ -1154,8 +1130,9 @@ pub enum AugOp<'a> { }, } -impl<'a> Inflate<'a> for AugOp<'a> { - fn inflate(self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { + type Inflated = AugOp<'a>; + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::AddAssign { tok, .. 
} => { let whitespace_before = parse_parenthesizable_whitespace( @@ -1166,10 +1143,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::AddAssign { + Self::Inflated::AddAssign { whitespace_before, whitespace_after, - tok, } } Self::SubtractAssign { tok, .. } => { @@ -1181,10 +1157,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::SubtractAssign { + Self::Inflated::SubtractAssign { whitespace_before, whitespace_after, - tok, } } Self::MultiplyAssign { tok, .. } => { @@ -1196,10 +1171,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::MultiplyAssign { + Self::Inflated::MultiplyAssign { whitespace_before, whitespace_after, - tok, } } Self::MatrixMultiplyAssign { tok, .. } => { @@ -1211,10 +1185,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::MatrixMultiplyAssign { + Self::Inflated::MatrixMultiplyAssign { whitespace_before, whitespace_after, - tok, } } Self::DivideAssign { tok, .. } => { @@ -1226,10 +1199,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::DivideAssign { + Self::Inflated::DivideAssign { whitespace_before, whitespace_after, - tok, } } Self::ModuloAssign { tok, .. } => { @@ -1241,10 +1213,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::ModuloAssign { + Self::Inflated::ModuloAssign { whitespace_before, whitespace_after, - tok, } } Self::BitAndAssign { tok, .. } => { @@ -1256,10 +1227,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitAndAssign { + Self::Inflated::BitAndAssign { whitespace_before, whitespace_after, - tok, } } Self::BitOrAssign { tok, .. } => { @@ -1271,10 +1241,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitOrAssign { + Self::Inflated::BitOrAssign { whitespace_before, whitespace_after, - tok, } } Self::BitXorAssign { tok, .. } => { @@ -1286,10 +1255,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::BitXorAssign { + Self::Inflated::BitXorAssign { whitespace_before, whitespace_after, - tok, } } Self::LeftShiftAssign { tok, .. } => { @@ -1301,10 +1269,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::LeftShiftAssign { + Self::Inflated::LeftShiftAssign { whitespace_before, whitespace_after, - tok, } } Self::RightShiftAssign { tok, .. } => { @@ -1316,10 +1283,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::RightShiftAssign { + Self::Inflated::RightShiftAssign { whitespace_before, whitespace_after, - tok, } } Self::PowerAssign { tok, .. } => { @@ -1331,10 +1297,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::PowerAssign { + Self::Inflated::PowerAssign { whitespace_before, whitespace_after, - tok, } } Self::FloorDivideAssign { tok, .. 
} => { @@ -1346,10 +1311,9 @@ impl<'a> Inflate<'a> for AugOp<'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::FloorDivideAssign { + Self::Inflated::FloorDivideAssign { whitespace_before, whitespace_after, - tok, } } }) @@ -1431,8 +1395,7 @@ impl<'a> Codegen<'a> for AugOp<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct BitOr<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -1440,17 +1403,21 @@ pub struct BitOr<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for BitOr<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedBitOr<'r, 'a> { + type Inflated = BitOr<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after = parse_parenthesizable_whitespace( + let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + whitespace_before, + whitespace_after, + }) } } diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 65c702f3..65006ab3 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -3,7 +3,7 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. -use std::{mem::swap, rc::Rc}; +use std::mem::swap; use super::{ inflate_helpers::adjust_parameters_trailing_whitespace, Attribute, Codegen, CodegenState, @@ -13,8 +13,12 @@ use super::{ }; use crate::{ nodes::{ - traits::{Inflate, Result, WithComma, WithLeadingLines}, - Arg, AssignEqual, Asynchronous, AugOp, BitOr, Element, ParenthesizedNode, + expression::*, + op::*, + traits::{ + Inflate, ParenthesizedDeflatedNode, ParenthesizedNode, Result, WithComma, + WithLeadingLines, + }, }, tokenizer::{ whitespace_parser::{ @@ -27,13 +31,12 @@ use crate::{ }; #[cfg(feature = "py")] use libcst_derive::TryIntoPy; -use libcst_derive::{Codegen, Inflate, ParenthesizedNode}; +use libcst_derive::{cst_node, Codegen, Inflate, ParenthesizedDeflatedNode, ParenthesizedNode}; -type TokenRef<'a> = Rc>; +type TokenRef<'r, 'a> = &'r Token<'a>; #[allow(clippy::large_enum_variant)] -#[derive(Debug, Eq, PartialEq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Inflate, Codegen)] pub enum Statement<'a> { Simple(SimpleStatementLine<'a>), Compound(CompoundStatement<'a>), @@ -48,9 +51,8 @@ impl<'a> WithLeadingLines<'a> for Statement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] #[allow(clippy::large_enum_variant)] +#[cst_node(Inflate, Codegen)] pub enum CompoundStatement<'a> { FunctionDef(FunctionDef<'a>), If(If<'a>), @@ -79,15 +81,13 @@ impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Inflate, Codegen)] pub enum Suite<'a> { IndentedBlock(IndentedBlock<'a>), SimpleStatementSuite(SimpleStatementSuite<'a>), } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct IndentedBlock<'a> { /// Sequence of statements belonging to this 
indented block. pub body: Vec>, @@ -144,9 +144,10 @@ impl<'a> Codegen<'a> for IndentedBlock<'a> { } } -impl<'a> Inflate<'a> for IndentedBlock<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.body = self.body.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedIndentedBlock<'r, 'a> { + type Inflated = IndentedBlock<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let body = self.body.inflate(config)?; // We want to be able to only keep comments in the footer that are actually for // this IndentedBlock. We do so by assuming that lines which are indented to the // same level as the block itself are comments that go at the footer of the @@ -167,18 +168,20 @@ impl<'a> Inflate<'a> for IndentedBlock<'a> { config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - self.footer = footer; - self.header = header; - self.indent = self.indent_tok.relative_indent; - if self.indent == Some(config.default_indent) { - self.indent = None; + let mut indent = self.indent_tok.relative_indent; + if indent == Some(config.default_indent) { + indent = None; } - Ok(self) + Ok(Self::Inflated { + body, + header, + indent, + footer, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct SimpleStatementSuite<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. @@ -193,18 +196,23 @@ pub struct SimpleStatementSuite<'a> { pub(crate) newline_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for SimpleStatementSuite<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_whitespace = parse_simple_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedSimpleStatementSuite<'r, 'a> { + type Inflated = SimpleStatementSuite<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_whitespace = parse_simple_whitespace( config, &mut (*self.first_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; - self.trailing_whitespace = parse_trailing_whitespace( + let body = self.body.inflate(config)?; + let trailing_whitespace = parse_trailing_whitespace( config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + body, + leading_whitespace, + trailing_whitespace, + }) } } @@ -232,8 +240,7 @@ impl<'a> Codegen<'a> for SimpleStatementSuite<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct SimpleStatementLine<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. 
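
In the file headers above (module.rs, op.rs, statement.rs) the `TokenRef` alias changes from `Rc<Token<'a>>` to `&'r Token<'a>`, which is why the deflated nodes pick up a second lifetime parameter: one lifetime for the source text, one for the borrowed token. A small illustrative sketch, with stand-in types only, of how that extra lifetime threads through a deflated node:

```
// Sketch of the TokenRef change; not libcst's real definitions.
struct Token<'a> {
    string: &'a str,
}

// Old shape (conceptually): type TokenRef<'a> = std::rc::Rc<Token<'a>>;
// New shape: a plain borrow with its own lifetime 'r, distinct from the
// lifetime 'a of the underlying source text.
type TokenRef<'r, 'a> = &'r Token<'a>;

struct DeflatedSemicolon<'r, 'a> {
    tok: TokenRef<'r, 'a>,
}

fn main() {
    let source = String::from(";");
    let tok = Token { string: source.as_str() }; // 'a borrows `source`
    let node = DeflatedSemicolon { tok: &tok };  // 'r borrows `tok`
    assert_eq!(node.tok.string, ";");
}
```
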
@@ -258,25 +265,29 @@ impl<'a> Codegen<'a> for SimpleStatementLine<'a> { } } -impl<'a> Inflate<'a> for SimpleStatementLine<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedSimpleStatementLine<'r, 'a> { + type Inflated = SimpleStatementLine<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.first_tok).whitespace_before.borrow_mut(), None, )?; - self.body = self.body.inflate(config)?; - self.trailing_whitespace = parse_trailing_whitespace( + let body = self.body.inflate(config)?; + let trailing_whitespace = parse_trailing_whitespace( config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + body, + leading_lines, + trailing_whitespace, + }) } } #[allow(dead_code, clippy::large_enum_variant)] -#[derive(Debug, Eq, PartialEq, Clone, Codegen, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, Inflate)] pub enum SmallStatement<'a> { Pass(Pass<'a>), Break(Break<'a>), @@ -295,8 +306,8 @@ pub enum SmallStatement<'a> { Del(Del<'a>), } -impl<'a> SmallStatement<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedSmallStatement<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { match self { Self::Pass(p) => Self::Pass(p.with_semicolon(semicolon)), Self::Break(p) => Self::Break(p.with_semicolon(semicolon)), @@ -317,13 +328,12 @@ impl<'a> SmallStatement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Pass<'a> { pub semicolon: Option>, } -impl<'a> Pass<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedPass<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon } } } @@ -333,20 +343,20 @@ impl<'a> Codegen<'a> for Pass<'a> { self.semicolon.codegen(state); } } -impl<'a> Inflate<'a> for Pass<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedPass<'r, 'a> { + type Inflated = Pass<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { semicolon }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Break<'a> { pub semicolon: Option>, } -impl<'a> Break<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedBreak<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon } } } @@ -356,20 +366,20 @@ impl<'a> Codegen<'a> for Break<'a> { self.semicolon.codegen(state); } } -impl<'a> Inflate<'a> for Break<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedBreak<'r, 'a> { + type Inflated = Break<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { semicolon }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Continue<'a> { pub semicolon: Option>, } -impl<'a> Continue<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedContinue<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { 
semicolon } } } @@ -379,21 +389,21 @@ impl<'a> Codegen<'a> for Continue<'a> { self.semicolon.codegen(state); } } -impl<'a> Inflate<'a> for Continue<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedContinue<'r, 'a> { + type Inflated = Continue<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { semicolon }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Expr<'a> { pub value: Expression<'a>, pub semicolon: Option>, } -impl<'a> Expr<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedExpr<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } @@ -403,16 +413,16 @@ impl<'a> Codegen<'a> for Expr<'a> { self.semicolon.codegen(state); } } -impl<'a> Inflate<'a> for Expr<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.value = self.value.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedExpr<'r, 'a> { + type Inflated = Expr<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let value = self.value.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { value, semicolon }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Assign<'a> { pub targets: Vec>, pub value: Expression<'a>, @@ -431,23 +441,27 @@ impl<'a> Codegen<'a> for Assign<'a> { } } -impl<'a> Inflate<'a> for Assign<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.targets = self.targets.inflate(config)?; - self.value = self.value.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedAssign<'r, 'a> { + type Inflated = Assign<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let targets = self.targets.inflate(config)?; + let value = self.value.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + targets, + value, + semicolon, + }) } } -impl<'a> Assign<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedAssign<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct AssignTarget<'a> { pub target: AssignTargetExpression<'a>, pub whitespace_before_equal: SimpleWhitespace<'a>, @@ -465,22 +479,26 @@ impl<'a> Codegen<'a> for AssignTarget<'a> { } } -impl<'a> Inflate<'a> for AssignTarget<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.target = self.target.inflate(config)?; - self.whitespace_before_equal = parse_simple_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedAssignTarget<'r, 'a> { + type Inflated = AssignTarget<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let target = self.target.inflate(config)?; + let whitespace_before_equal = parse_simple_whitespace( config, &mut (*self.equal_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after_equal = + let whitespace_after_equal = parse_simple_whitespace(config, &mut (*self.equal_tok).whitespace_after.borrow_mut())?; - Ok(self) + Ok(Self::Inflated { + target, + whitespace_before_equal, + 
whitespace_after_equal, + }) } } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, ParenthesizedNode, Inflate)] pub enum AssignTargetExpression<'a> { Name(Box>), Attribute(Box>), @@ -490,8 +508,7 @@ pub enum AssignTargetExpression<'a> { Subscript(Box>), } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Import<'a> { pub names: Vec>, pub semicolon: Option>, @@ -516,26 +533,30 @@ impl<'a> Codegen<'a> for Import<'a> { } } -impl<'a> Inflate<'a> for Import<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after_import = parse_simple_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedImport<'r, 'a> { + type Inflated = Import<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_import = parse_simple_whitespace( config, &mut (*self.import_tok).whitespace_after.borrow_mut(), )?; - self.names = self.names.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) + let names = self.names.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + names, + semicolon, + whitespace_after_import, + }) } } -impl<'a> Import<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedImport<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ImportFrom<'a> { #[cfg_attr(feature = "py", no_py_default)] pub module: Option>, @@ -579,96 +600,119 @@ impl<'a> Codegen<'a> for ImportFrom<'a> { } } -impl<'a> Inflate<'a> for ImportFrom<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after_from = +impl<'r, 'a> Inflate<'a> for DeflatedImportFrom<'r, 'a> { + type Inflated = ImportFrom<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_from = parse_simple_whitespace(config, &mut (*self.from_tok).whitespace_after.borrow_mut())?; - self.module = self.module.inflate(config)?; + let module = self.module.inflate(config)?; - self.whitespace_after_import = parse_simple_whitespace( + let whitespace_after_import = parse_simple_whitespace( config, &mut (*self.import_tok).whitespace_after.borrow_mut(), )?; - self.relative = inflate_dots(self.relative, config)?; + let mut relative = inflate_dots(self.relative, config)?; + let mut whitespace_before_import = Default::default(); - if !self.relative.is_empty() && self.module.is_none() { + if !relative.is_empty() && module.is_none() { // For relative-only imports relocate the space after the final dot to be owned // by the import token. if let Some(Dot { whitespace_after: ParenthesizableWhitespace::SimpleWhitespace(dot_ws), .. 
- }) = self.relative.last_mut() + }) = relative.last_mut() { - swap(dot_ws, &mut self.whitespace_before_import); + swap(dot_ws, &mut whitespace_before_import); } } else { - self.whitespace_before_import = parse_simple_whitespace( + whitespace_before_import = parse_simple_whitespace( config, &mut (*self.import_tok).whitespace_before.borrow_mut(), )?; } - self.lpar = self.lpar.inflate(config)?; - self.names = self.names.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; + let lpar = self.lpar.inflate(config)?; + let names = self.names.inflate(config)?; + let rpar = self.rpar.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + module, + names, + relative, + lpar, + rpar, + semicolon, + whitespace_after_from, + whitespace_before_import, + whitespace_after_import, + }) } } -fn inflate_dots<'a>(dots: Vec>, config: &Config<'a>) -> Result>> { +fn inflate_dots<'r, 'a>( + dots: Vec>, + config: &Config<'a>, +) -> Result>> { let mut ret: Vec> = vec![]; - let mut last_tok: Option> = None; - for mut dot in dots { + let mut last_tok: Option> = None; + for dot in dots { if let Some(last_tokref) = &last_tok { // Consecutive dots having the same Token can only happen if `...` was // parsed as a single ELLIPSIS token. In this case the token's // whitespace_before belongs to the first dot, but the whitespace_after is // moved to the 3rd dot (by swapping it twice) if last_tokref.start_pos == dot.tok.start_pos { + let mut subsequent_dot = Dot { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + }; swap( &mut ret.last_mut().unwrap().whitespace_after, - &mut dot.whitespace_after, + &mut subsequent_dot.whitespace_after, ); - ret.push(dot); + ret.push(subsequent_dot); continue; } } - last_tok = Some(dot.tok.clone()); + last_tok = Some(dot.tok); ret.push(dot.inflate(config)?); } Ok(ret) } -impl<'a> ImportFrom<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedImportFrom<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ImportAlias<'a> { pub name: NameOrAttribute<'a>, pub asname: Option>, pub comma: Option>, } -impl<'a> Inflate<'a> for ImportAlias<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.name = self.name.inflate(config)?; - self.asname = self.asname.inflate(config)?; - self.comma = self.comma.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedImportAlias<'r, 'a> { + type Inflated = ImportAlias<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let name = self.name.inflate(config)?; + let asname = self.asname.inflate(config)?; + let comma = self.comma.inflate(config)?; + Ok(Self::Inflated { + name, + asname, + comma, + }) } } -impl<'a> WithComma<'a> for ImportAlias<'a> { - fn with_comma(self, comma: Comma<'a>) -> ImportAlias<'a> { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedImportAlias<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { let comma = Some(comma); Self { comma, ..self } } @@ -686,8 +730,7 @@ impl<'a> Codegen<'a> for ImportAlias<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct AsName<'a> { pub name: AssignTargetExpression<'a>, pub whitespace_before_as: ParenthesizableWhitespace<'a>, @@ -705,23 +748,27 @@ impl<'a> Codegen<'a> 
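The `WithComma` change visible here for `ImportAlias` (and repeated later for `WithItem`, the match-sequence elements, and the mapping/keyword elements) moves the trait onto the deflated nodes and gives it the extra `'r` token lifetime, so comma attachment happens before inflation. A sketch with stand-in types (`DeflatedComma`, `DeflatedAliasLike` are not the real definitions):

```rust
// Sketch of the reworked comma hook: the trait now takes a DeflatedComma and
// is implemented on deflated nodes rather than inflated ones.
use std::marker::PhantomData;

pub struct DeflatedComma<'r, 'a> {
    pub tok: &'r str,
    pub _phantom: PhantomData<&'a ()>,
}

pub trait WithComma<'r, 'a> {
    fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self;
}

pub struct DeflatedAliasLike<'r, 'a> {
    pub name: &'a str,
    pub comma: Option<DeflatedComma<'r, 'a>>,
}

impl<'r, 'a> WithComma<'r, 'a> for DeflatedAliasLike<'r, 'a> {
    fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self {
        // Functional update: swap in the comma, keep every other field.
        Self {
            comma: Some(comma),
            ..self
        }
    }
}
```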
for AsName<'a> { } } -impl<'a> Inflate<'a> for AsName<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before_as = parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedAsName<'r, 'a> { + type Inflated = AsName<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before_as = parse_parenthesizable_whitespace( config, &mut (*self.as_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_after_as = parse_parenthesizable_whitespace( + let whitespace_after_as = parse_parenthesizable_whitespace( config, &mut (*self.as_tok).whitespace_after.borrow_mut(), )?; - self.name = self.name.inflate(config)?; - Ok(self) + let name = self.name.inflate(config)?; + Ok(Self::Inflated { + name, + whitespace_before_as, + whitespace_after_as, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Inflate)] pub enum ImportNames<'a> { Star(ImportStar), Aliases(Vec>), @@ -743,8 +790,7 @@ impl<'a> Codegen<'a> for ImportNames<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct FunctionDef<'a> { pub name: Name<'a>, pub params: Parameters<'a>, @@ -766,8 +812,8 @@ pub struct FunctionDef<'a> { pub(crate) colon_tok: TokenRef<'a>, } -impl<'a> FunctionDef<'a> { - pub fn with_decorators(self, decorators: Vec>) -> Self { +impl<'r, 'a> DeflatedFunctionDef<'r, 'a> { + pub fn with_decorators(self, decorators: Vec>) -> Self { Self { decorators, ..self } } } @@ -807,9 +853,10 @@ impl<'a> Codegen<'a> for FunctionDef<'a> { } } -impl<'a> Inflate<'a> for FunctionDef<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.decorators = self.decorators.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { + type Inflated = FunctionDef<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let mut decorators = self.decorators.inflate(config)?; let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { let whitespace_after = parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; @@ -825,8 +872,7 @@ impl<'a> Inflate<'a> for FunctionDef<'a> { (None, None) }; - self.asynchronous = asynchronous; - let leading_lines = if let Some(ll) = leading_lines { + let mut leading_lines = if let Some(ll) = leading_lines { ll } else { parse_empty_lines( @@ -836,41 +882,54 @@ impl<'a> Inflate<'a> for FunctionDef<'a> { )? 
}; - self.leading_lines = leading_lines; - if let Some(dec) = self.decorators.first_mut() { - swap(&mut self.lines_after_decorators, &mut self.leading_lines); - swap(&mut dec.leading_lines, &mut self.leading_lines); + let mut lines_after_decorators = Default::default(); + + if let Some(dec) = decorators.first_mut() { + swap(&mut lines_after_decorators, &mut leading_lines); + swap(&mut dec.leading_lines, &mut leading_lines); } - self.whitespace_after_def = + let whitespace_after_def = parse_simple_whitespace(config, &mut (*self.def_tok).whitespace_after.borrow_mut())?; - self.name = self.name.inflate(config)?; - self.whitespace_after_name = parse_simple_whitespace( + let name = self.name.inflate(config)?; + let whitespace_after_name = parse_simple_whitespace( config, &mut (*self.open_paren_tok).whitespace_before.borrow_mut(), )?; - self.whitespace_before_params = parse_parenthesizable_whitespace( + let whitespace_before_params = parse_parenthesizable_whitespace( config, &mut (*self.open_paren_tok).whitespace_after.borrow_mut(), )?; - self.params = self.params.inflate(config)?; - adjust_parameters_trailing_whitespace(config, &mut self.params, &self.close_paren_tok)?; + let mut params = self.params.inflate(config)?; + adjust_parameters_trailing_whitespace(config, &mut params, &self.close_paren_tok)?; - self.returns = self.returns.inflate(config)?; - self.whitespace_before_colon = parse_simple_whitespace( + let returns = self.returns.inflate(config)?; + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + Ok(Self::Inflated { + name, + params, + body, + decorators, + returns, + asynchronous, + leading_lines, + lines_after_decorators, + whitespace_after_def, + whitespace_after_name, + whitespace_before_params, + whitespace_before_colon, + }) } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Decorator<'a> { pub decorator: Expression<'a>, pub leading_lines: Vec>, @@ -894,26 +953,31 @@ impl<'a> Codegen<'a> for Decorator<'a> { } } -impl<'a> Inflate<'a> for Decorator<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedDecorator<'r, 'a> { + type Inflated = Decorator<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.at_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_after_at = + let whitespace_after_at = parse_simple_whitespace(config, &mut (*self.at_tok).whitespace_after.borrow_mut())?; - self.decorator = self.decorator.inflate(config)?; - self.trailing_whitespace = parse_trailing_whitespace( + let decorator = self.decorator.inflate(config)?; + let trailing_whitespace = parse_trailing_whitespace( config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - Ok(self) + Ok(Self::Inflated { + decorator, + leading_lines, + whitespace_after_at, + trailing_whitespace, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct If<'a> { /// The expression that, when evaluated, should give us a truthy value pub test: Expression<'a>, @@ -960,37 +1024,44 @@ impl<'a> Codegen<'a> for If<'a> { } } -impl<'a> Inflate<'a> for If<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> 
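The `FunctionDef` hunk above keeps the subtle leading-line bookkeeping from the old code, just expressed with locals: the empty lines the first decorator parsed before its `@` become the statement's `leading_lines`, while the lines parsed right before `def` become `lines_after_decorators`. The sketch below illustrates the two `swap` calls with a stand-in `DecoratorLike` type and a small test; it is not the real helper, which is inlined in `inflate`.

```rust
use std::mem::swap;

#[derive(Default, Debug, PartialEq)]
struct DecoratorLike {
    leading_lines: Vec<String>,
}

// Mirrors the swap sequence in DeflatedFunctionDef::inflate /
// DeflatedClassDef::inflate; returns (leading_lines, lines_after_decorators).
fn reassign_leading_lines(
    decorators: &mut [DecoratorLike],
    mut leading_lines: Vec<String>,
) -> (Vec<String>, Vec<String>) {
    let mut lines_after_decorators = Vec::new();
    if let Some(dec) = decorators.first_mut() {
        // Lines parsed before `def` actually sit after the decorators...
        swap(&mut lines_after_decorators, &mut leading_lines);
        // ...and the first decorator's lines are the statement's true header.
        swap(&mut dec.leading_lines, &mut leading_lines);
    }
    (leading_lines, lines_after_decorators)
}

#[test]
fn moves_lines_to_the_right_owner() {
    let mut decs = vec![DecoratorLike {
        leading_lines: vec!["# before @".into()],
    }];
    let (leading, after) = reassign_leading_lines(&mut decs, vec!["".into()]);
    assert_eq!(leading, vec!["# before @".to_string()]);
    assert_eq!(after, vec!["".to_string()]);
    assert!(decs[0].leading_lines.is_empty());
}
```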
Inflate<'a> for DeflatedIf<'r, 'a> { + type Inflated = If<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.if_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_before_test = + let whitespace_before_test = parse_simple_whitespace(config, &mut (*self.if_tok).whitespace_after.borrow_mut())?; - self.test = self.test.inflate(config)?; - self.whitespace_after_test = parse_simple_whitespace( + let test = self.test.inflate(config)?; + let whitespace_after_test = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; - self.orelse = self.orelse.inflate(config)?; + let body = self.body.inflate(config)?; + let orelse = self.orelse.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + test, + body, + orelse, + leading_lines, + whitespace_before_test, + whitespace_after_test, + is_elif: self.is_elif, + }) } } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Inflate, Codegen)] pub enum OrElse<'a> { Elif(If<'a>), Else(Else<'a>), } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Else<'a> { pub body: Suite<'a>, /// Sequence of empty lines appearing before this compound statement line. @@ -1016,25 +1087,29 @@ impl<'a> Codegen<'a> for Else<'a> { } } -impl<'a> Inflate<'a> for Else<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedElse<'r, 'a> { + type Inflated = Else<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.else_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_before_colon = parse_simple_whitespace( + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; + let body = self.body.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + body, + leading_lines, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Annotation<'a> { pub annotation: Expression<'a>, pub whitespace_before_indicator: Option>, @@ -1059,23 +1134,27 @@ impl<'a> Annotation<'a> { } } -impl<'a> Inflate<'a> for Annotation<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_before_indicator = Some(parse_parenthesizable_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedAnnotation<'r, 'a> { + type Inflated = Annotation<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_before_indicator = Some(parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?); - self.whitespace_after_indicator = parse_parenthesizable_whitespace( + let whitespace_after_indicator = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - self.annotation = self.annotation.inflate(config)?; - Ok(self) + let annotation = self.annotation.inflate(config)?; + Ok(Self::Inflated { + annotation, + whitespace_before_indicator, + whitespace_after_indicator, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct AnnAssign<'a> { pub target: AssignTargetExpression<'a>, pub annotation: 
Annotation<'a>, @@ -1103,25 +1182,31 @@ impl<'a> Codegen<'a> for AnnAssign<'a> { } } -impl<'a> Inflate<'a> for AnnAssign<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.target = self.target.inflate(config)?; - self.annotation = self.annotation.inflate(config)?; - self.value = self.value.inflate(config)?; - self.equal = self.equal.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedAnnAssign<'r, 'a> { + type Inflated = AnnAssign<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let target = self.target.inflate(config)?; + let annotation = self.annotation.inflate(config)?; + let value = self.value.inflate(config)?; + let equal = self.equal.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + target, + annotation, + value, + equal, + semicolon, + }) } } -impl<'a> AnnAssign<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedAnnAssign<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Return<'a> { pub value: Option>, pub whitespace_after_return: Option>, @@ -1148,32 +1233,36 @@ impl<'a> Codegen<'a> for Return<'a> { } } -impl<'a> Inflate<'a> for Return<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - if self.value.is_some() { - self.whitespace_after_return = Some(parse_simple_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedReturn<'r, 'a> { + type Inflated = Return<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_return = if self.value.is_some() { + Some(parse_simple_whitespace( config, &mut (*self.return_tok).whitespace_after.borrow_mut(), - )?); + )?) 
} else { // otherwise space is owned by semicolon or small statement // whitespace is not None to preserve a quirk of the pure python parser - self.whitespace_after_return = Some(Default::default()) - } - self.value = self.value.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) + Some(Default::default()) + }; + let value = self.value.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + value, + whitespace_after_return, + semicolon, + }) } } -impl<'a> Return<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedReturn<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Assert<'a> { pub test: Expression<'a>, pub msg: Option>, @@ -1202,30 +1291,36 @@ impl<'a> Codegen<'a> for Assert<'a> { } } } -impl<'a> Inflate<'a> for Assert<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after_assert = parse_simple_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedAssert<'r, 'a> { + type Inflated = Assert<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_assert = parse_simple_whitespace( config, &mut (*self.assert_tok).whitespace_after.borrow_mut(), )?; - self.test = self.test.inflate(config)?; - self.comma = self.comma.inflate(config)?; - self.msg = self.msg.inflate(config)?; + let test = self.test.inflate(config)?; + let comma = self.comma.inflate(config)?; + let msg = self.msg.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + test, + msg, + comma, + whitespace_after_assert, + semicolon, + }) } } -impl<'a> Assert<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedAssert<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Raise<'a> { pub exc: Option>, pub cause: Option>, @@ -1235,26 +1330,34 @@ pub struct Raise<'a> { pub(crate) raise_tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Raise<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - if self.exc.is_some() { - self.whitespace_after_raise = Some(parse_simple_whitespace( +impl<'r, 'a> Inflate<'a> for DeflatedRaise<'r, 'a> { + type Inflated = Raise<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_raise = if self.exc.is_some() { + Some(parse_simple_whitespace( config, &mut (*self.raise_tok).whitespace_after.borrow_mut(), - )?); - } + )?) 
+ } else { + Default::default() + }; - self.exc = self.exc.inflate(config)?; - self.cause = self.cause.inflate(config)?; - if self.exc.is_none() { - if let Some(cause) = self.cause.as_mut() { + let exc = self.exc.inflate(config)?; + let mut cause = self.cause.inflate(config)?; + if exc.is_none() { + if let Some(cause) = cause.as_mut() { // in `raise from`, `raise` owns the shared whitespace cause.whitespace_before_from = None; } } - self.semicolon = self.semicolon.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + exc, + cause, + whitespace_after_raise, + semicolon, + }) } } @@ -1281,24 +1384,24 @@ impl<'a> Codegen<'a> for Raise<'a> { } } -impl<'a> Raise<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedRaise<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct NameItem<'a> { pub name: Name<'a>, pub comma: Option>, } -impl<'a> Inflate<'a> for NameItem<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.name = self.name.inflate(config)?; - self.comma = self.comma.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedNameItem<'r, 'a> { + type Inflated = NameItem<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let name = self.name.inflate(config)?; + let comma = self.comma.inflate(config)?; + Ok(Self::Inflated { name, comma }) } } @@ -1313,8 +1416,7 @@ impl<'a> NameItem<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Global<'a> { pub names: Vec>, pub whitespace_after_global: SimpleWhitespace<'a>, @@ -1323,13 +1425,18 @@ pub struct Global<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Global<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after_global = +impl<'r, 'a> Inflate<'a> for DeflatedGlobal<'r, 'a> { + type Inflated = Global<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_global = parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; - self.names = self.names.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) + let names = self.names.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + names, + whitespace_after_global, + semicolon, + }) } } @@ -1348,14 +1455,13 @@ impl<'a> Codegen<'a> for Global<'a> { } } -impl<'a> Global<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedGlobal<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Nonlocal<'a> { pub names: Vec>, pub whitespace_after_nonlocal: SimpleWhitespace<'a>, @@ -1364,13 +1470,18 @@ pub struct Nonlocal<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Nonlocal<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after_nonlocal = +impl<'r, 'a> Inflate<'a> for DeflatedNonlocal<'r, 'a> { + type Inflated = Nonlocal<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_nonlocal = parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; - self.names = self.names.inflate(config)?; - self.semicolon = 
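`Return` and `Raise` show the same "only parse keyword whitespace when an operand follows" rule, but with different fallbacks: `Return` keeps `Some(Default::default())` (the quirk the in-diff comment attributes to the pure Python parser), while `Raise` falls back to `None`. A stand-in sketch of both branches; `SimpleWhitespace` and the parser below are simplified, not the real ones:

```rust
#[derive(Debug, Default, PartialEq)]
struct SimpleWhitespace(String);

fn parse_simple_whitespace(raw: &str) -> Result<SimpleWhitespace, String> {
    Ok(SimpleWhitespace(
        raw.chars().take_while(|c| *c == ' ').collect(),
    ))
}

// `return`: keep Some(default) even without a value, preserving the quirk.
fn ws_after_return(has_value: bool, raw: &str) -> Result<Option<SimpleWhitespace>, String> {
    Ok(if has_value {
        Some(parse_simple_whitespace(raw)?)
    } else {
        Some(Default::default())
    })
}

// `raise`: with no exception expression the whitespace is owned by the
// semicolon or the enclosing small statement, so fall back to None.
fn ws_after_raise(has_exc: bool, raw: &str) -> Result<Option<SimpleWhitespace>, String> {
    Ok(if has_exc {
        Some(parse_simple_whitespace(raw)?)
    } else {
        Default::default()
    })
}
```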
self.semicolon.inflate(config)?; - Ok(self) + let names = self.names.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + names, + whitespace_after_nonlocal, + semicolon, + }) } } @@ -1389,14 +1500,13 @@ impl<'a> Codegen<'a> for Nonlocal<'a> { } } -impl<'a> Nonlocal<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedNonlocal<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct For<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1442,8 +1552,9 @@ impl<'a> Codegen<'a> for For<'a> { } } -impl<'a> Inflate<'a> for For<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedFor<'r, 'a> { + type Inflated = For<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { let whitespace_after = parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; @@ -1458,7 +1569,7 @@ impl<'a> Inflate<'a> for For<'a> { } else { (None, None) }; - self.leading_lines = if let Some(ll) = leading_lines { + let leading_lines = if let Some(ll) = leading_lines { ll } else { parse_empty_lines( @@ -1467,29 +1578,38 @@ impl<'a> Inflate<'a> for For<'a> { None, )? }; - self.asynchronous = asynchronous; - self.whitespace_after_for = + let whitespace_after_for = parse_simple_whitespace(config, &mut (*self.for_tok).whitespace_after.borrow_mut())?; - self.target = self.target.inflate(config)?; - self.whitespace_before_in = + let target = self.target.inflate(config)?; + let whitespace_before_in = parse_simple_whitespace(config, &mut (*self.in_tok).whitespace_before.borrow_mut())?; - self.whitespace_after_in = + let whitespace_after_in = parse_simple_whitespace(config, &mut (*self.in_tok).whitespace_after.borrow_mut())?; - self.iter = self.iter.inflate(config)?; - self.whitespace_before_colon = parse_simple_whitespace( + let iter = self.iter.inflate(config)?; + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; - self.orelse = self.orelse.inflate(config)?; + let body = self.body.inflate(config)?; + let orelse = self.orelse.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + target, + iter, + body, + orelse, + asynchronous, + leading_lines, + whitespace_after_for, + whitespace_before_in, + whitespace_after_in, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct While<'a> { pub test: Expression<'a>, pub body: Suite<'a>, @@ -1521,29 +1641,36 @@ impl<'a> Codegen<'a> for While<'a> { } } -impl<'a> Inflate<'a> for While<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedWhile<'r, 'a> { + type Inflated = While<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.while_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_after_while = + let whitespace_after_while = parse_simple_whitespace(config, &mut (*self.while_tok).whitespace_after.borrow_mut())?; - self.test = self.test.inflate(config)?; - self.whitespace_before_colon = parse_simple_whitespace( + let test 
= self.test.inflate(config)?; + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; - self.orelse = self.orelse.inflate(config)?; + let body = self.body.inflate(config)?; + let orelse = self.orelse.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + test, + body, + orelse, + leading_lines, + whitespace_after_while, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ClassDef<'a> { pub name: Name<'a>, pub body: Suite<'a>, @@ -1559,7 +1686,8 @@ pub struct ClassDef<'a> { pub whitespace_before_colon: SimpleWhitespace<'a>, pub(crate) class_tok: TokenRef<'a>, - pub(crate) parens_tok: Option<(TokenRef<'a>, TokenRef<'a>)>, + pub(crate) lpar_tok: Option>, + pub(crate) rpar_tok: Option>, pub(crate) colon_tok: TokenRef<'a>, } @@ -1606,51 +1734,69 @@ impl<'a> Codegen<'a> for ClassDef<'a> { } } -impl<'a> Inflate<'a> for ClassDef<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedClassDef<'r, 'a> { + type Inflated = ClassDef<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let mut leading_lines = parse_empty_lines( config, &mut (*self.class_tok).whitespace_before.borrow_mut(), None, )?; - self.decorators = self.decorators.inflate(config)?; - if let Some(dec) = self.decorators.first_mut() { - swap(&mut self.lines_after_decorators, &mut self.leading_lines); - swap(&mut dec.leading_lines, &mut self.leading_lines); + let mut decorators = self.decorators.inflate(config)?; + let mut lines_after_decorators = Default::default(); + if let Some(dec) = decorators.first_mut() { + swap(&mut lines_after_decorators, &mut leading_lines); + swap(&mut dec.leading_lines, &mut leading_lines); } - self.whitespace_after_class = + let whitespace_after_class = parse_simple_whitespace(config, &mut (*self.class_tok).whitespace_after.borrow_mut())?; - self.name = self.name.inflate(config)?; + let name = self.name.inflate(config)?; - if let Some((lpar_tok, _)) = self.parens_tok.as_mut() { - self.whitespace_after_name = - parse_simple_whitespace(config, &mut lpar_tok.whitespace_before.borrow_mut())?; - self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; - self.bases = self.bases.inflate(config)?; - self.keywords = self.keywords.inflate(config)?; - self.rpar = self.rpar.map(|rpar| rpar.inflate(config)).transpose()?; - // TODO: set whitespace_after_arg for last arg? - } + let (whitespace_after_name, lpar, bases, keywords, rpar) = + if let Some(lpar_tok) = self.lpar_tok.as_mut() { + ( + parse_simple_whitespace(config, &mut lpar_tok.whitespace_before.borrow_mut())?, + self.lpar.map(|lpar| lpar.inflate(config)).transpose()?, + self.bases.inflate(config)?, + self.keywords.inflate(config)?, + self.rpar.map(|rpar| rpar.inflate(config)).transpose()?, + // TODO: set whitespace_after_arg for last arg? 
+ ) + } else { + Default::default() + }; - self.whitespace_before_colon = parse_simple_whitespace( + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; + let body = self.body.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + name, + body, + bases, + keywords, + decorators, + lpar, + rpar, + leading_lines, + lines_after_decorators, + whitespace_after_class, + whitespace_after_name, + whitespace_before_colon, + }) } } -impl<'a> ClassDef<'a> { - pub fn with_decorators(self, decorators: Vec>) -> Self { +impl<'r, 'a> DeflatedClassDef<'r, 'a> { + pub fn with_decorators(self, decorators: Vec>) -> Self { Self { decorators, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Finally<'a> { pub body: Suite<'a>, pub leading_lines: Vec>, @@ -1674,24 +1820,28 @@ impl<'a> Codegen<'a> for Finally<'a> { } } -impl<'a> Inflate<'a> for Finally<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedFinally<'r, 'a> { + type Inflated = Finally<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.finally_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_before_colon = parse_simple_whitespace( + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + Ok(Self::Inflated { + body, + leading_lines, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ExceptHandler<'a> { pub body: Suite<'a>, pub r#type: Option>, @@ -1725,34 +1875,43 @@ impl<'a> Codegen<'a> for ExceptHandler<'a> { } } -impl<'a> Inflate<'a> for ExceptHandler<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedExceptHandler<'r, 'a> { + type Inflated = ExceptHandler<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.except_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_after_except = parse_simple_whitespace( + let whitespace_after_except = parse_simple_whitespace( config, &mut (*self.except_tok).whitespace_after.borrow_mut(), )?; - self.r#type = self.r#type.inflate(config)?; - self.name = self.name.inflate(config)?; - if self.name.is_some() { - self.whitespace_before_colon = parse_simple_whitespace( + let r#type = self.r#type.inflate(config)?; + let name = self.name.inflate(config)?; + let whitespace_before_colon = if name.is_some() { + parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), - )?; - } + )? 
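The `ClassDef` hunk also replaces the old "mutate a few optional fields under an `if let`" block with a single tuple binding whose `else` arm is `Default::default()`; the same idiom reappears below for the optional `if` guard in `MatchCase` and the `**rest` handling in `MatchMapping`. It works because tuples of `Default` types are themselves `Default`. A stand-in sketch (field names are illustrative, not the real ones):

```rust
// Compute several related fields at once, or default them all when the
// guarding token is absent.
fn parens_or_default(lpar_tok: Option<&str>) -> (String, Option<String>, Vec<String>) {
    if let Some(tok) = lpar_tok {
        (
            format!("whitespace before {tok}"), // stands in for parse_simple_whitespace
            Some("(".to_owned()),
            vec!["Base".to_owned()],
        )
    } else {
        // Equivalent to (String::default(), None, Vec::new()) in one call.
        Default::default()
    }
}
```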
+ } else { + Default::default() + }; - self.body = self.body.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + Ok(Self::Inflated { + body, + r#type, + name, + leading_lines, + whitespace_after_except, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct ExceptStarHandler<'a> { pub body: Suite<'a>, pub r#type: Expression<'a>, @@ -1788,34 +1947,41 @@ impl<'a> Codegen<'a> for ExceptStarHandler<'a> { } } -impl<'a> Inflate<'a> for ExceptStarHandler<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedExceptStarHandler<'r, 'a> { + type Inflated = ExceptStarHandler<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut self.except_tok.whitespace_before.borrow_mut(), None, )?; - self.whitespace_after_except = + let whitespace_after_except = parse_simple_whitespace(config, &mut self.except_tok.whitespace_after.borrow_mut())?; - self.whitespace_after_star = + let whitespace_after_star = parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; - self.r#type = self.r#type.inflate(config)?; - self.name = self.name.inflate(config)?; - if self.name.is_some() { - self.whitespace_before_colon = parse_simple_whitespace( - config, - &mut self.colon_tok.whitespace_before.borrow_mut(), - )?; - } + let r#type = self.r#type.inflate(config)?; + let name = self.name.inflate(config)?; + let whitespace_before_colon = if name.is_some() { + parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())? + } else { + Default::default() + }; - self.body = self.body.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + Ok(Self::Inflated { + body, + r#type, + name, + leading_lines, + whitespace_after_except, + whitespace_after_star, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Try<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -1850,25 +2016,32 @@ impl<'a> Codegen<'a> for Try<'a> { } } -impl<'a> Inflate<'a> for Try<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedTry<'r, 'a> { + type Inflated = Try<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.try_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_before_colon = + let whitespace_before_colon = parse_simple_whitespace(config, &mut (*self.try_tok).whitespace_after.borrow_mut())?; - self.body = self.body.inflate(config)?; - self.handlers = self.handlers.inflate(config)?; - self.orelse = self.orelse.inflate(config)?; - self.finalbody = self.finalbody.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + let handlers = self.handlers.inflate(config)?; + let orelse = self.orelse.inflate(config)?; + let finalbody = self.finalbody.inflate(config)?; + Ok(Self::Inflated { + body, + handlers, + orelse, + finalbody, + leading_lines, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct TryStar<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -1903,25 +2076,32 @@ impl<'a> Codegen<'a> for TryStar<'a> { } } -impl<'a> Inflate<'a> for TryStar<'a> { - fn inflate(mut self, 
config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedTryStar<'r, 'a> { + type Inflated = TryStar<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut (*self.try_tok).whitespace_before.borrow_mut(), None, )?; - self.whitespace_before_colon = + let whitespace_before_colon = parse_simple_whitespace(config, &mut (*self.try_tok).whitespace_after.borrow_mut())?; - self.body = self.body.inflate(config)?; - self.handlers = self.handlers.inflate(config)?; - self.orelse = self.orelse.inflate(config)?; - self.finalbody = self.finalbody.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + let handlers = self.handlers.inflate(config)?; + let orelse = self.orelse.inflate(config)?; + let finalbody = self.finalbody.inflate(config)?; + Ok(Self::Inflated { + body, + handlers, + orelse, + finalbody, + leading_lines, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct AugAssign<'a> { pub target: AssignTargetExpression<'a>, pub operator: AugOp<'a>, @@ -1929,13 +2109,19 @@ pub struct AugAssign<'a> { pub semicolon: Option>, } -impl<'a> Inflate<'a> for AugAssign<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.target = self.target.inflate(config)?; - self.operator = self.operator.inflate(config)?; - self.value = self.value.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedAugAssign<'r, 'a> { + type Inflated = AugAssign<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let target = self.target.inflate(config)?; + let operator = self.operator.inflate(config)?; + let value = self.value.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + target, + operator, + value, + semicolon, + }) } } @@ -1951,30 +2137,33 @@ impl<'a> Codegen<'a> for AugAssign<'a> { } } -impl<'a> AugAssign<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedAugAssign<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct WithItem<'a> { pub item: Expression<'a>, pub asname: Option>, pub comma: Option>, } -impl<'a> WithItem<'a> { - fn inflate_withitem(mut self, config: &Config<'a>, is_last: bool) -> Result { - self.item = self.item.inflate(config)?; - self.asname = self.asname.inflate(config)?; - self.comma = if is_last { +impl<'r, 'a> DeflatedWithItem<'r, 'a> { + fn inflate_withitem(self, config: &Config<'a>, is_last: bool) -> Result> { + let item = self.item.inflate(config)?; + let asname = self.asname.inflate(config)?; + let comma = if is_last { self.comma.map(|c| c.inflate_before(config)).transpose()? } else { self.comma.map(|c| c.inflate(config)).transpose()? 
}; - Ok(self) + Ok(WithItem { + item, + asname, + comma, + }) } } @@ -1990,8 +2179,8 @@ impl<'a> Codegen<'a> for WithItem<'a> { } } -impl<'a> WithComma<'a> for WithItem<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedWithItem<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { Self { comma: Some(comma), ..self @@ -1999,8 +2188,7 @@ impl<'a> WithComma<'a> for WithItem<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct With<'a> { pub items: Vec>, pub body: Suite<'a>, @@ -2060,8 +2248,9 @@ impl<'a> Codegen<'a> for With<'a> { } } -impl<'a> Inflate<'a> for With<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { +impl<'r, 'a> Inflate<'a> for DeflatedWith<'r, 'a> { + type Inflated = With<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { let whitespace_after = parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; @@ -2077,9 +2266,7 @@ impl<'a> Inflate<'a> for With<'a> { (None, None) }; - self.asynchronous = asynchronous; - - self.leading_lines = if let Some(ll) = leading_lines { + let leading_lines = if let Some(ll) = leading_lines { ll } else { parse_empty_lines( @@ -2089,32 +2276,42 @@ impl<'a> Inflate<'a> for With<'a> { )? }; - self.whitespace_after_with = + let whitespace_after_with = parse_simple_whitespace(config, &mut (*self.with_tok).whitespace_after.borrow_mut())?; - self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; + let lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; let len = self.items.len(); - self.items = self + let items = self .items .into_iter() .enumerate() .map(|(idx, el)| el.inflate_withitem(config, idx + 1 == len)) .collect::>>()?; - if !self.items.is_empty() { + let rpar = if !items.is_empty() { // rpar only has whitespace if items is non empty - self.rpar = self.rpar.map(|rpar| rpar.inflate(config)).transpose()?; - } - self.whitespace_before_colon = parse_simple_whitespace( + self.rpar.map(|rpar| rpar.inflate(config)).transpose()? 
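`inflate_withitem` above, like the various `inflate_element` helpers later in the diff, threads an `is_last` flag through so the final comma only inflates its leading whitespace (`inflate_before`): the whitespace after the last comma is owned by the closing bracket or parenthesis. A stand-in sketch of that split; `Comma` here is simplified and not the real node:

```rust
#[derive(Debug)]
pub struct Comma {
    pub before: String,
    pub after: Option<String>,
}

impl Comma {
    fn inflate(self, raw_after: &str) -> Comma {
        Comma {
            before: self.before,
            after: Some(raw_after.to_owned()),
        }
    }
    fn inflate_before(self) -> Comma {
        // Leave `after` unclaimed; the enclosing `)`, `]` or `}` parses it.
        Comma { after: None, ..self }
    }
}

fn inflate_comma(comma: Option<Comma>, raw_after: &str, is_last: bool) -> Option<Comma> {
    if is_last {
        comma.map(Comma::inflate_before)
    } else {
        comma.map(|c| c.inflate(raw_after))
    }
}
```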
+ } else { + Default::default() + }; + let whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - self.body = self.body.inflate(config)?; + let body = self.body.inflate(config)?; - Ok(self) + Ok(Self::Inflated { + items, + body, + asynchronous, + leading_lines, + lpar, + rpar, + whitespace_after_with, + whitespace_before_colon, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, ParenthesizedNode, Inflate)] pub enum DelTargetExpression<'a> { Name(Box>), Attribute(Box>), @@ -2123,28 +2320,29 @@ pub enum DelTargetExpression<'a> { Subscript(Box>), } -impl<'a> std::convert::From> for Expression<'a> { - fn from(d: DelTargetExpression<'a>) -> Self { +impl<'r, 'a> std::convert::From> + for DeflatedExpression<'r, 'a> +{ + fn from(d: DeflatedDelTargetExpression<'r, 'a>) -> Self { match d { - DelTargetExpression::Attribute(a) => Expression::Attribute(a), - DelTargetExpression::List(l) => Expression::List(l), - DelTargetExpression::Name(n) => Expression::Name(n), - DelTargetExpression::Subscript(s) => Expression::Subscript(s), - DelTargetExpression::Tuple(t) => Expression::Tuple(t), + DeflatedDelTargetExpression::Attribute(a) => Self::Attribute(a), + DeflatedDelTargetExpression::List(l) => Self::List(l), + DeflatedDelTargetExpression::Name(n) => Self::Name(n), + DeflatedDelTargetExpression::Subscript(s) => Self::Subscript(s), + DeflatedDelTargetExpression::Tuple(t) => Self::Tuple(t), } } } -impl<'a> std::convert::From> for Element<'a> { - fn from(d: DelTargetExpression<'a>) -> Element { - Element::Simple { +impl<'r, 'a> std::convert::From> for DeflatedElement<'r, 'a> { + fn from(d: DeflatedDelTargetExpression<'r, 'a>) -> Self { + Self::Simple { value: d.into(), comma: None, } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Del<'a> { pub target: DelTargetExpression<'a>, pub whitespace_after_del: SimpleWhitespace<'a>, @@ -2153,13 +2351,18 @@ pub struct Del<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'a> Inflate<'a> for Del<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.whitespace_after_del = +impl<'r, 'a> Inflate<'a> for DeflatedDel<'r, 'a> { + type Inflated = Del<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after_del = parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; - self.target = self.target.inflate(config)?; - self.semicolon = self.semicolon.inflate(config)?; - Ok(self) + let target = self.target.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + target, + whitespace_after_del, + semicolon, + }) } } @@ -2174,14 +2377,13 @@ impl<'a> Codegen<'a> for Del<'a> { } } -impl<'a> Del<'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'r, 'a> DeflatedDel<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct Match<'a> { pub subject: Expression<'a>, pub cases: Vec>, @@ -2227,37 +2429,46 @@ impl<'a> Codegen<'a> for Match<'a> { } } -impl<'a> Inflate<'a> for Match<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedMatch<'r, 'a> { + type Inflated = Match<'a>; + fn inflate(self, config: 
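The `From` impls rewritten just below are plain enum-widening conversions: every `DeflatedDelTargetExpression` variant maps onto the corresponding variant of the wider `DeflatedExpression` (or is wrapped into a `DeflatedElement`). A simplified stand-in, with made-up variants and payloads:

```rust
enum DelTarget {
    Name(String),
    Tuple(Vec<String>),
}

enum Expr {
    Name(String),
    Tuple(Vec<String>),
    Call(String), // extra variant with no del-target counterpart
}

impl From<DelTarget> for Expr {
    fn from(d: DelTarget) -> Self {
        match d {
            DelTarget::Name(n) => Self::Name(n),
            DelTarget::Tuple(t) => Self::Tuple(t),
        }
    }
}
```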
&Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut self.match_tok.whitespace_before.borrow_mut(), None, )?; - self.whitespace_after_match = + let whitespace_after_match = parse_simple_whitespace(config, &mut self.match_tok.whitespace_after.borrow_mut())?; - self.subject = self.subject.inflate(config)?; - self.whitespace_before_colon = + let subject = self.subject.inflate(config)?; + let whitespace_before_colon = parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())?; - self.whitespace_after_colon = + let whitespace_after_colon = parse_trailing_whitespace(config, &mut self.colon_tok.whitespace_after.borrow_mut())?; - self.indent = self.indent_tok.relative_indent; - if self.indent == Some(config.default_indent) { - self.indent = None; + let mut indent = self.indent_tok.relative_indent; + if indent == Some(config.default_indent) { + indent = None; } - self.cases = self.cases.inflate(config)?; + let cases = self.cases.inflate(config)?; // See note about footers in `IndentedBlock`'s inflate fn - self.footer = parse_empty_lines( + let footer = parse_empty_lines( config, &mut self.dedent_tok.whitespace_after.borrow_mut(), Some(self.indent_tok.whitespace_before.borrow().absolute_indent), )?; - Ok(self) + Ok(Self::Inflated { + subject, + cases, + leading_lines, + whitespace_after_match, + whitespace_before_colon, + whitespace_after_colon, + indent, + footer, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchCase<'a> { pub pattern: MatchPattern<'a>, pub guard: Option>, @@ -2295,34 +2506,45 @@ impl<'a> Codegen<'a> for MatchCase<'a> { } } -impl<'a> Inflate<'a> for MatchCase<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.leading_lines = parse_empty_lines( +impl<'r, 'a> Inflate<'a> for DeflatedMatchCase<'r, 'a> { + type Inflated = MatchCase<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let leading_lines = parse_empty_lines( config, &mut self.case_tok.whitespace_before.borrow_mut(), None, )?; - self.whitespace_after_case = + let whitespace_after_case = parse_simple_whitespace(config, &mut self.case_tok.whitespace_after.borrow_mut())?; - self.pattern = self.pattern.inflate(config)?; - if let Some(if_tok) = self.if_tok.as_mut() { - self.whitespace_before_if = - parse_simple_whitespace(config, &mut if_tok.whitespace_before.borrow_mut())?; - self.whitespace_after_if = - parse_simple_whitespace(config, &mut if_tok.whitespace_after.borrow_mut())?; - - self.guard = self.guard.inflate(config)?; - } - self.whitespace_before_colon = + let pattern = self.pattern.inflate(config)?; + let (whitespace_before_if, whitespace_after_if, guard) = + if let Some(if_tok) = self.if_tok.as_mut() { + ( + parse_simple_whitespace(config, &mut if_tok.whitespace_before.borrow_mut())?, + parse_simple_whitespace(config, &mut if_tok.whitespace_after.borrow_mut())?, + self.guard.inflate(config)?, + ) + } else { + Default::default() + }; + let whitespace_before_colon = parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())?; - self.body = self.body.inflate(config)?; - Ok(self) + let body = self.body.inflate(config)?; + Ok(Self::Inflated { + pattern, + guard, + body, + leading_lines, + whitespace_after_case, + whitespace_before_if, + whitespace_after_if, + whitespace_before_colon, + }) } } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, ParenthesizedNode)] -#[cfg_attr(feature = "py", 
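`DeflatedMatch::inflate` above also carries over the relative-indent normalization (the same trick the diff's comment attributes to `IndentedBlock`): an indent equal to the module-wide default is stored as `None` so codegen can re-derive it. A small stand-in sketch:

```rust
fn normalize_indent<'a>(
    relative_indent: Option<&'a str>,
    default_indent: &str,
) -> Option<&'a str> {
    match relative_indent {
        Some(indent) if indent == default_indent => None,
        other => other,
    }
}

fn demo() {
    assert_eq!(normalize_indent(Some("    "), "    "), None);
    assert_eq!(normalize_indent(Some("\t"), "    "), Some("\t"));
    assert_eq!(normalize_indent(None, "    "), None);
}
```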
derive(TryIntoPy))] +#[cst_node(Codegen, Inflate, ParenthesizedNode)] pub enum MatchPattern<'a> { Value(MatchValue<'a>), Singleton(MatchSingleton<'a>), @@ -2333,8 +2555,7 @@ pub enum MatchPattern<'a> { Or(Box>), } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchValue<'a> { pub value: Expression<'a>, } @@ -2365,15 +2586,33 @@ impl<'a> Codegen<'a> for MatchValue<'a> { } } -impl<'a> Inflate<'a> for MatchValue<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.value = self.value.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedMatchValue<'r, 'a> { + type Inflated = MatchValue<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let value = self.value.inflate(config)?; + Ok(Self::Inflated { value }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +impl<'r, 'a> ParenthesizedDeflatedNode<'r, 'a> for DeflatedMatchValue<'r, 'a> { + fn lpar(&self) -> &Vec> { + self.value.lpar() + } + fn rpar(&self) -> &Vec> { + self.value.rpar() + } + fn with_parens( + self, + left: DeflatedLeftParen<'r, 'a>, + right: DeflatedRightParen<'r, 'a>, + ) -> Self { + Self { + value: self.value.with_parens(left, right), + } + } +} + +#[cst_node] pub struct MatchSingleton<'a> { pub value: Name<'a>, } @@ -2404,23 +2643,40 @@ impl<'a> Codegen<'a> for MatchSingleton<'a> { } } -impl<'a> Inflate<'a> for MatchSingleton<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.value = self.value.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedMatchSingleton<'r, 'a> { + type Inflated = MatchSingleton<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let value = self.value.inflate(config)?; + Ok(Self::Inflated { value }) + } +} + +impl<'r, 'a> ParenthesizedDeflatedNode<'r, 'a> for DeflatedMatchSingleton<'r, 'a> { + fn lpar(&self) -> &Vec> { + self.value.lpar() + } + fn rpar(&self) -> &Vec> { + self.value.rpar() + } + fn with_parens( + self, + left: DeflatedLeftParen<'r, 'a>, + right: DeflatedRightParen<'r, 'a>, + ) -> Self { + Self { + value: self.value.with_parens(left, right), + } } } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(Codegen, Inflate, ParenthesizedNode)] pub enum MatchSequence<'a> { MatchList(MatchList<'a>), MatchTuple(MatchTuple<'a>), } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct MatchList<'a> { pub patterns: Vec>, pub lbracket: Option>, @@ -2446,27 +2702,33 @@ impl<'a> Codegen<'a> for MatchList<'a> { } } -impl<'a> Inflate<'a> for MatchList<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbracket = self.lbracket.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedMatchList<'r, 'a> { + type Inflated = MatchList<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbracket = self.lbracket.inflate(config)?; let len = self.patterns.len(); - self.patterns = self + let patterns = self .patterns .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - self.rbracket = self.rbracket.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let rbracket = self.rbracket.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + 
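The new `ParenthesizedDeflatedNode` impls added here for `DeflatedMatchValue` and `DeflatedMatchSingleton` are pure delegation: the wrapper stores no parentheses of its own, so `lpar`, `rpar` and `with_parens` all forward to the wrapped value. A simplified stand-in of that pattern (trait, `Paren`, `Inner`, `Wrapper` are not the real types, and the paren bookkeeping is illustrative only):

```rust
pub struct Paren;

pub trait Parenthesized: Sized {
    fn lpar(&self) -> &Vec<Paren>;
    fn rpar(&self) -> &Vec<Paren>;
    fn with_parens(self, left: Paren, right: Paren) -> Self;
}

pub struct Inner {
    lpar: Vec<Paren>,
    rpar: Vec<Paren>,
}

impl Parenthesized for Inner {
    fn lpar(&self) -> &Vec<Paren> {
        &self.lpar
    }
    fn rpar(&self) -> &Vec<Paren> {
        &self.rpar
    }
    fn with_parens(mut self, left: Paren, right: Paren) -> Self {
        self.lpar.push(left);
        self.rpar.push(right);
        self
    }
}

pub struct Wrapper {
    value: Inner,
}

impl Parenthesized for Wrapper {
    fn lpar(&self) -> &Vec<Paren> {
        self.value.lpar()
    }
    fn rpar(&self) -> &Vec<Paren> {
        self.value.rpar()
    }
    fn with_parens(self, left: Paren, right: Paren) -> Self {
        // No local paren storage: forward everything to the inner value.
        Self {
            value: self.value.with_parens(left, right),
        }
    }
}
```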
Ok(Self::Inflated { + patterns, + lbracket, + rbracket, + lpar, + rpar, + }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct MatchTuple<'a> { pub patterns: Vec>, pub lpar: Vec>, @@ -2488,24 +2750,28 @@ impl<'a> Codegen<'a> for MatchTuple<'a> { } } -impl<'a> Inflate<'a> for MatchTuple<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedMatchTuple<'r, 'a> { + type Inflated = MatchTuple<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; let len = self.patterns.len(); - self.patterns = self + let patterns = self .patterns .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + patterns, + lpar, + rpar, + }) } } #[allow(clippy::large_enum_variant)] -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub enum StarrableMatchSequenceElement<'a> { Simple(MatchSequenceElement<'a>), Starred(MatchStar<'a>), @@ -2523,16 +2789,26 @@ impl<'a> StarrableMatchSequenceElement<'a> { Self::Starred(s) => s.codegen(state, default_comma, default_comma_whitespace), } } - fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result { +} +impl<'r, 'a> DeflatedStarrableMatchSequenceElement<'r, 'a> { + fn inflate_element( + self, + config: &Config<'a>, + last_element: bool, + ) -> Result> { Ok(match self { - Self::Simple(s) => Self::Simple(s.inflate_element(config, last_element)?), - Self::Starred(s) => Self::Starred(s.inflate_element(config, last_element)?), + Self::Simple(s) => { + StarrableMatchSequenceElement::Simple(s.inflate_element(config, last_element)?) + } + Self::Starred(s) => { + StarrableMatchSequenceElement::Starred(s.inflate_element(config, last_element)?) 
+ } }) } } -impl<'a> WithComma<'a> for StarrableMatchSequenceElement<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedStarrableMatchSequenceElement<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { match self { Self::Simple(s) => Self::Simple(s.with_comma(comma)), Self::Starred(s) => Self::Starred(s.with_comma(comma)), @@ -2540,8 +2816,7 @@ impl<'a> WithComma<'a> for StarrableMatchSequenceElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchSequenceElement<'a> { pub value: MatchPattern<'a>, pub comma: Option>, @@ -2560,20 +2835,25 @@ impl<'a> MatchSequenceElement<'a> { state.add_token(if default_comma_whitespace { ", " } else { "," }); } } - - fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { - self.value = self.value.inflate(config)?; - self.comma = if last_element { +} +impl<'r, 'a> DeflatedMatchSequenceElement<'r, 'a> { + fn inflate_element( + self, + config: &Config<'a>, + last_element: bool, + ) -> Result> { + let value = self.value.inflate(config)?; + let comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(self) + Ok(MatchSequenceElement { value, comma }) } } -impl<'a> WithComma<'a> for MatchSequenceElement<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchSequenceElement<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { Self { comma: Some(comma), ..self @@ -2581,8 +2861,7 @@ impl<'a> WithComma<'a> for MatchSequenceElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchStar<'a> { pub name: Option>, pub comma: Option>, @@ -2610,24 +2889,29 @@ impl<'a> MatchStar<'a> { state.add_token(if default_comma_whitespace { ", " } else { "," }); } } - - fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { - self.whitespace_before_name = parse_parenthesizable_whitespace( +} +impl<'r, 'a> DeflatedMatchStar<'r, 'a> { + fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result> { + let whitespace_before_name = parse_parenthesizable_whitespace( config, &mut self.star_tok.whitespace_after.borrow_mut(), )?; - self.name = self.name.inflate(config)?; - self.comma = if last_element { + let name = self.name.inflate(config)?; + let comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(self) + Ok(MatchStar { + name, + comma, + whitespace_before_name, + }) } } -impl<'a> WithComma<'a> for MatchStar<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchStar<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { Self { comma: Some(comma), ..self @@ -2635,8 +2919,7 @@ impl<'a> WithComma<'a> for MatchStar<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct MatchMapping<'a> { pub elements: Vec>, pub rest: Option>, @@ -2670,38 +2953,50 @@ impl<'a> Codegen<'a> for MatchMapping<'a> { } } -impl<'a> Inflate<'a> for MatchMapping<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.lbrace = self.lbrace.inflate(config)?; +impl<'r, 'a> Inflate<'a> for 
DeflatedMatchMapping<'r, 'a> { + type Inflated = MatchMapping<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let lbrace = self.lbrace.inflate(config)?; let len = self.elements.len(); let no_star = self.star_tok.is_none(); - self.elements = self + let elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, no_star && idx + 1 == len)) .collect::>>()?; - if let Some(star_tok) = self.star_tok.as_mut() { - self.whitespace_before_rest = - parse_simple_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())?; - self.rest = self.rest.inflate(config)?; - self.trailing_comma = self - .trailing_comma - .map(|c| c.inflate_before(config)) - .transpose()?; - } + let (whitespace_before_rest, rest, trailing_comma) = + if let Some(star_tok) = self.star_tok.as_mut() { + ( + parse_simple_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())?, + self.rest.inflate(config)?, + self.trailing_comma + .map(|c| c.inflate_before(config)) + .transpose()?, + ) + } else { + Default::default() + }; - self.rbrace = self.rbrace.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let rbrace = self.rbrace.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + elements, + rest, + trailing_comma, + lbrace, + rbrace, + lpar, + rpar, + whitespace_before_rest, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchMappingElement<'a> { pub key: Expression<'a>, pub pattern: MatchPattern<'a>, @@ -2725,29 +3020,40 @@ impl<'a> MatchMappingElement<'a> { state.add_token(", "); } } - - fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { - self.key = self.key.inflate(config)?; - self.whitespace_before_colon = parse_parenthesizable_whitespace( +} +impl<'r, 'a> DeflatedMatchMappingElement<'r, 'a> { + fn inflate_element( + self, + config: &Config<'a>, + last_element: bool, + ) -> Result> { + let key = self.key.inflate(config)?; + let whitespace_before_colon = parse_parenthesizable_whitespace( config, &mut self.colon_tok.whitespace_before.borrow_mut(), )?; - self.whitespace_after_colon = parse_parenthesizable_whitespace( + let whitespace_after_colon = parse_parenthesizable_whitespace( config, &mut self.colon_tok.whitespace_after.borrow_mut(), )?; - self.pattern = self.pattern.inflate(config)?; - self.comma = if last_element { + let pattern = self.pattern.inflate(config)?; + let comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(self) + Ok(MatchMappingElement { + key, + pattern, + comma, + whitespace_before_colon, + whitespace_after_colon, + }) } } -impl<'a> WithComma<'a> for MatchMappingElement<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchMappingElement<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { Self { comma: Some(comma), ..self @@ -2755,8 +3061,7 @@ impl<'a> WithComma<'a> for MatchMappingElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct MatchClass<'a> { pub cls: NameOrAttribute<'a>, pub patterns: Vec>, @@ -2793,47 +3098,56 @@ impl<'a> Codegen<'a> for MatchClass<'a> { } } -impl<'a> Inflate<'a> for MatchClass<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = 
self.lpar.inflate(config)?; +impl<'r, 'a> Inflate<'a> for DeflatedMatchClass<'r, 'a> { + type Inflated = MatchClass<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; - self.cls = self.cls.inflate(config)?; - self.whitespace_after_cls = parse_parenthesizable_whitespace( + let cls = self.cls.inflate(config)?; + let whitespace_after_cls = parse_parenthesizable_whitespace( config, &mut self.lpar_tok.whitespace_before.borrow_mut(), )?; - self.whitespace_before_patterns = parse_parenthesizable_whitespace( + let whitespace_before_patterns = parse_parenthesizable_whitespace( config, &mut self.lpar_tok.whitespace_after.borrow_mut(), )?; let patlen = self.patterns.len(); let kwdlen = self.kwds.len(); - self.patterns = self + let patterns = self .patterns .into_iter() .enumerate() .map(|(idx, pat)| pat.inflate_element(config, idx + 1 == patlen + kwdlen)) .collect::>()?; - self.kwds = self + let kwds = self .kwds .into_iter() .enumerate() .map(|(idx, kwd)| kwd.inflate_element(config, idx + 1 == kwdlen)) .collect::>()?; - self.whitespace_after_kwds = parse_parenthesizable_whitespace( + let whitespace_after_kwds = parse_parenthesizable_whitespace( config, &mut self.rpar_tok.whitespace_before.borrow_mut(), )?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + cls, + patterns, + kwds, + lpar, + rpar, + whitespace_after_cls, + whitespace_before_patterns, + whitespace_after_kwds, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchKeywordElement<'a> { pub key: Name<'a>, pub pattern: MatchPattern<'a>, @@ -2857,28 +3171,40 @@ impl<'a> MatchKeywordElement<'a> { state.add_token(", "); } } - fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { - self.key = self.key.inflate(config)?; - self.whitespace_before_equal = parse_parenthesizable_whitespace( +} +impl<'r, 'a> DeflatedMatchKeywordElement<'r, 'a> { + fn inflate_element( + self, + config: &Config<'a>, + last_element: bool, + ) -> Result> { + let key = self.key.inflate(config)?; + let whitespace_before_equal = parse_parenthesizable_whitespace( config, &mut self.equal_tok.whitespace_before.borrow_mut(), )?; - self.whitespace_after_equal = parse_parenthesizable_whitespace( + let whitespace_after_equal = parse_parenthesizable_whitespace( config, &mut self.equal_tok.whitespace_after.borrow_mut(), )?; - self.pattern = self.pattern.inflate(config)?; - self.comma = if last_element { + let pattern = self.pattern.inflate(config)?; + let comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(self) + Ok(MatchKeywordElement { + key, + pattern, + comma, + whitespace_before_equal, + whitespace_after_equal, + }) } } -impl<'a> WithComma<'a> for MatchKeywordElement<'a> { - fn with_comma(self, comma: Comma<'a>) -> Self { +impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchKeywordElement<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { Self { comma: Some(comma), ..self @@ -2886,8 +3212,7 @@ impl<'a> WithComma<'a> for MatchKeywordElement<'a> { } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct MatchAs<'a> { pub pattern: Option>, pub name: Option>, @@ -2918,28 +3243,40 @@ impl<'a> Codegen<'a> for MatchAs<'a> { } } -impl<'a> Inflate<'a> for MatchAs<'a> { - fn inflate(mut 
self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.pattern = self.pattern.inflate(config)?; - if let Some(as_tok) = self.as_tok.as_mut() { - self.whitespace_before_as = Some(parse_parenthesizable_whitespace( - config, - &mut as_tok.whitespace_before.borrow_mut(), - )?); - self.whitespace_after_as = Some(parse_parenthesizable_whitespace( - config, - &mut as_tok.whitespace_after.borrow_mut(), - )?); - } - self.name = self.name.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedMatchAs<'r, 'a> { + type Inflated = MatchAs<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let pattern = self.pattern.inflate(config)?; + let (whitespace_before_as, whitespace_after_as) = if let Some(as_tok) = self.as_tok.as_mut() + { + ( + Some(parse_parenthesizable_whitespace( + config, + &mut as_tok.whitespace_before.borrow_mut(), + )?), + Some(parse_parenthesizable_whitespace( + config, + &mut as_tok.whitespace_after.borrow_mut(), + )?), + ) + } else { + Default::default() + }; + let name = self.name.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + pattern, + name, + lpar, + rpar, + whitespace_before_as, + whitespace_after_as, + }) } } -#[derive(Debug, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node] pub struct MatchOrElement<'a> { pub pattern: MatchPattern<'a>, pub separator: Option>, @@ -2955,16 +3292,16 @@ impl<'a> MatchOrElement<'a> { } } -impl<'a> Inflate<'a> for MatchOrElement<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.pattern = self.pattern.inflate(config)?; - self.separator = self.separator.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedMatchOrElement<'r, 'a> { + type Inflated = MatchOrElement<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let pattern = self.pattern.inflate(config)?; + let separator = self.separator.inflate(config)?; + Ok(Self::Inflated { pattern, separator }) } } -#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[cst_node(ParenthesizedNode)] pub struct MatchOr<'a> { pub patterns: Vec>, pub lpar: Vec>, @@ -2982,11 +3319,16 @@ impl<'a> Codegen<'a> for MatchOr<'a> { } } -impl<'a> Inflate<'a> for MatchOr<'a> { - fn inflate(mut self, config: &Config<'a>) -> Result { - self.lpar = self.lpar.inflate(config)?; - self.patterns = self.patterns.inflate(config)?; - self.rpar = self.rpar.inflate(config)?; - Ok(self) +impl<'r, 'a> Inflate<'a> for DeflatedMatchOr<'r, 'a> { + type Inflated = MatchOr<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let patterns = self.patterns.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + patterns, + lpar, + rpar, + }) } } diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index f8859641..1efa29fc 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -4,13 +4,15 @@ // LICENSE file in the root directory of this source tree use crate::{ + nodes::expression::{DeflatedLeftParen, DeflatedRightParen}, + nodes::op::DeflatedComma, tokenizer::whitespace_parser::{Config, WhitespaceError}, - Codegen, CodegenState, Comma, EmptyLine, LeftParen, RightParen, + Codegen, CodegenState, EmptyLine, LeftParen, RightParen, }; use std::ops::Deref; -pub trait WithComma<'a> { - fn with_comma(self, comma: 
Comma<'a>) -> Self; +pub trait WithComma<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self; } pub trait ParenthesizedNode<'a> { @@ -51,6 +53,32 @@ impl<'a, T: ParenthesizedNode<'a>> ParenthesizedNode<'a> for Box { } } +pub trait ParenthesizedDeflatedNode<'r, 'a> { + fn lpar(&self) -> &Vec>; + fn rpar(&self) -> &Vec>; + + fn with_parens( + self, + left: DeflatedLeftParen<'r, 'a>, + right: DeflatedRightParen<'r, 'a>, + ) -> Self; +} +impl<'r, 'a, T: ParenthesizedDeflatedNode<'r, 'a>> ParenthesizedDeflatedNode<'r, 'a> for Box { + fn lpar(&self) -> &Vec> { + self.deref().lpar() + } + fn rpar(&self) -> &Vec> { + self.deref().rpar() + } + fn with_parens( + self, + left: DeflatedLeftParen<'r, 'a>, + right: DeflatedRightParen<'r, 'a>, + ) -> Self { + Self::new((*self).with_parens(left, right)) + } +} + pub trait WithLeadingLines<'a> { fn leading_lines(&mut self) -> &mut Vec>; } @@ -61,17 +89,20 @@ pub trait Inflate<'a> where Self: Sized, { - fn inflate(self, config: &Config<'a>) -> Result; + type Inflated; + fn inflate(self, config: &Config<'a>) -> Result; } impl<'a, T: Inflate<'a>> Inflate<'a> for Option { - fn inflate(self, config: &Config<'a>) -> Result { + type Inflated = Option; + fn inflate(self, config: &Config<'a>) -> Result { self.map(|x| x.inflate(config)).transpose() } } impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box { - fn inflate(self, config: &Config<'a>) -> Result { + type Inflated = Box; + fn inflate(self, config: &Config<'a>) -> Result { match (*self).inflate(config) { Ok(a) => Ok(Box::new(a)), Err(e) => Err(e), @@ -80,7 +111,8 @@ impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box { } impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { - fn inflate(self, config: &Config<'a>) -> Result { + type Inflated = Vec; + fn inflate(self, config: &Config<'a>) -> Result { self.into_iter().map(|item| item.inflate(config)).collect() } } diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 3aaf7130..25fa225f 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -5,9 +5,14 @@ use std::rc::Rc; -use crate::nodes::*; +use crate::expression::make_async; +use crate::nodes::deflated::*; +use crate::nodes::expression::make_fstringtext; +use crate::nodes::op::make_importstar; +use crate::nodes::traits::ParenthesizedDeflatedNode; use crate::parser::ParserError; use crate::tokenizer::{TokType, Token}; +use crate::WithComma; use peg::str::LineCol; use peg::{parser, Parse, ParseElem, RuleResult}; use TokType::{ @@ -67,14 +72,14 @@ impl<'a> Parse for TokVec<'a> { } } -type TokenRef<'a> = Rc>; +type TokenRef<'input, 'a> = &'input Token<'a>; -impl<'a> ParseElem for TokVec<'a> { - type Element = TokenRef<'a>; +impl<'input, 'a: 'input> ParseElem<'input> for TokVec<'a> { + type Element = TokenRef<'input, 'a>; - fn parse_elem(&self, pos: usize) -> RuleResult { + fn parse_elem(&'input self, pos: usize) -> RuleResult { match self.0.get(pos) { - Some(tok) => RuleResult::Matched(pos + 1, tok.clone()), + Some(tok) => RuleResult::Matched(pos + 1, tok), None => RuleResult::Failed, } } @@ -85,32 +90,32 @@ parser! 
{ // Starting Rules - pub rule file(encoding: Option<&str>) -> Module<'a> + pub rule file(encoding: Option<&str>) -> Module<'input, 'a> = traced(<_file(encoding.unwrap_or("utf-8"))>) - pub rule expression_input() -> Expression<'a> + pub rule expression_input() -> Expression<'input, 'a> = traced() - pub rule statement_input() -> Statement<'a> + pub rule statement_input() -> Statement<'input, 'a> = traced() - rule _file(encoding: &str) -> Module<'a> + rule _file(encoding: &str) -> Module<'input, 'a> = s:statements()? eof:tok(EndMarker, "EOF") { make_module(s.unwrap_or_default(), eof, encoding) } // General statements - rule statements() -> Vec> + rule statements() -> Vec> = statement()+ - rule statement() -> Statement<'a> + rule statement() -> Statement<'input, 'a> = c:compound_stmt() { Statement::Compound(c) } / s:simple_stmts() { Statement::Simple(make_simple_statement_line(s)) } - rule simple_stmts() -> SimpleStatementParts<'a> + rule simple_stmts() -> SimpleStatementParts<'input, 'a> = first_tok:&_ stmts:separated_trailer(, ) nl:tok(NL, "NEWLINE") { SimpleStatementParts { first_tok, @@ -122,7 +127,7 @@ parser! { } #[cache] - rule simple_stmt() -> SmallStatement<'a> + rule simple_stmt() -> SmallStatement<'input, 'a> = assignment() / e:star_expressions() { SmallStatement::Expr(Expr { value: e, semicolon: None }) } / &lit("return") s:return_stmt() { SmallStatement::Return(s) } @@ -140,7 +145,7 @@ parser! { / &lit("nonlocal") s:nonlocal_stmt() {SmallStatement::Nonlocal(s)} - rule compound_stmt() -> CompoundStatement<'a> + rule compound_stmt() -> CompoundStatement<'input, 'a> = &(lit("def") / lit("@") / tok(Async, "ASYNC")) f:function_def() { CompoundStatement::FunctionDef(f) } @@ -155,7 +160,7 @@ parser! { // Simple statements - rule assignment() -> SmallStatement<'a> + rule assignment() -> SmallStatement<'input, 'a> = a:name() col:lit(":") ann:expression() rhs:(eq:lit("=") d:annotated_rhs() {(eq, d)})? { SmallStatement::AnnAssign(make_ann_assignment( @@ -173,10 +178,10 @@ parser! { SmallStatement::AugAssign(make_aug_assign(t, op, rhs)) } - rule annotated_rhs() -> Expression<'a> + rule annotated_rhs() -> Expression<'input, 'a> = yield_expr() / star_expressions() - rule augassign() -> AugOp<'a> + rule augassign() -> AugOp<'input, 'a> = &(lit("+=") / lit("-=") / lit("*=") @@ -193,12 +198,12 @@ parser! { make_aug_op(tok).map_err(|_| "aug_op") } - rule return_stmt() -> Return<'a> + rule return_stmt() -> Return<'input, 'a> = kw:lit("return") a:star_expressions()? { make_return(kw, a) } - rule raise_stmt() -> Raise<'a> + rule raise_stmt() -> Raise<'input, 'a> = kw:lit("raise") exc:expression() rest:(f:lit("from") cau:expression() {(f, cau)})? { make_raise(kw, Some(exc), rest) @@ -207,17 +212,17 @@ parser! { make_raise(kw, None, None) } - rule global_stmt() -> Global<'a> + rule global_stmt() -> Global<'input, 'a> = kw:lit("global") init:(n:name() c:comma() {(n, c)})* last:name() { make_global(kw, init, last) } - rule nonlocal_stmt() -> Nonlocal<'a> + rule nonlocal_stmt() -> Nonlocal<'input, 'a> = kw:lit("nonlocal") init:(n:name() c:comma() {(n, c)})* last:name() { make_nonlocal(kw, init, last) } - rule del_stmt() -> Del<'a> + rule del_stmt() -> Del<'input, 'a> = kw:lit("del") t:del_target() &(lit(";") / tok(NL, "NEWLINE")) { make_del(kw, t) } @@ -225,22 +230,22 @@ parser! 
{ make_del(kw, make_del_tuple(None, t, None)) } - rule yield_stmt() -> Expression<'a> + rule yield_stmt() -> Expression<'input, 'a> = yield_expr() - rule assert_stmt() -> Assert<'a> + rule assert_stmt() -> Assert<'input, 'a> = kw:lit("assert") test:expression() rest:(c:comma() msg:expression() {(c, msg)})? { make_assert(kw, test, rest) } // Import statements - rule import_name() -> Import<'a> + rule import_name() -> Import<'input, 'a> = kw:lit("import") a:dotted_as_names() { make_import(kw, a) } - rule import_from() -> ImportFrom<'a> + rule import_from() -> ImportFrom<'input, 'a> = from:lit("from") dots:dots()? m:dotted_name() import:lit("import") als:import_from_targets() { make_import_from(from, dots.unwrap_or_default(), Some(m), import, als) @@ -250,7 +255,7 @@ parser! { make_import_from(from, dots, None, import, als) } - rule import_from_targets() -> ParenthesizedImportNames<'a> + rule import_from_targets() -> ParenthesizedImportNames<'input, 'a> = lpar:lpar() als:import_from_as_names() c:comma()? rpar:rpar() { let mut als = als; if let (comma@Some(_), Some(mut last)) = (c, als.last_mut()) { @@ -259,31 +264,31 @@ parser! { (Some(lpar), ImportNames::Aliases(als), Some(rpar)) } / als:import_from_as_names() !lit(",") { (None, ImportNames::Aliases(als), None)} - / star:lit("*") { (None, ImportNames::Star(ImportStar {}), None) } + / star:lit("*") { (None, ImportNames::Star(make_importstar()), None) } - rule import_from_as_names() -> Vec> + rule import_from_as_names() -> Vec> = items:separated(, ) { make_import_from_as_names(items.0, items.1) } - rule import_from_as_name() -> ImportAlias<'a> + rule import_from_as_name() -> ImportAlias<'input, 'a> = n:name() asname:(kw:lit("as") z:name() {(kw, z)})? { make_import_alias(NameOrAttribute::N(Box::new(n)), asname) } - rule dotted_as_names() -> Vec> + rule dotted_as_names() -> Vec> = init:(d:dotted_as_name() c:comma() {d.with_comma(c)})* last:dotted_as_name() { concat(init, vec![last]) } - rule dotted_as_name() -> ImportAlias<'a> + rule dotted_as_name() -> ImportAlias<'input, 'a> = n:dotted_name() asname:(kw:lit("as") z:name() {(kw, z)})? { make_import_alias(n, asname) } // TODO: why does this diverge from CPython? - rule dotted_name() -> NameOrAttribute<'a> + rule dotted_name() -> NameOrAttribute<'input, 'a> = first:name() tail:(dot:lit(".") n:name() {(dot, n)})* { make_name_or_attr(first, tail) } @@ -293,7 +298,7 @@ parser! { // Common elements #[cache] - rule block() -> Suite<'a> + rule block() -> Suite<'input, 'a> = n:tok(NL, "NEWLINE") ind:tok(Indent, "INDENT") s:statements() ded:tok(Dedent, "DEDENT") { make_indented_block(n, ind, s, ded) } @@ -301,18 +306,18 @@ parser! { make_simple_statement_suite(s) } - rule decorators() -> Vec> + rule decorators() -> Vec> = (at:lit("@") e:named_expression() nl:tok(NL, "NEWLINE") { make_decorator(at, e, nl) } )+ // Class definitions - rule class_def() -> ClassDef<'a> + rule class_def() -> ClassDef<'input, 'a> = d:decorators() c:class_def_raw() { c.with_decorators(d) } / class_def_raw() - rule class_def_raw() -> ClassDef<'a> + rule class_def_raw() -> ClassDef<'input, 'a> = kw:lit("class") n:name() arg:(l:lpar() a:arguments()? r:rpar() {(l, a, r)})? col:lit(":") b:block() {? make_class_def(kw, n, arg, col, b) @@ -320,16 +325,16 @@ parser! 
{ // Function definitions - rule function_def() -> FunctionDef<'a> + rule function_def() -> FunctionDef<'input, 'a> = d:decorators() f:function_def_raw() {f.with_decorators(d)} / function_def_raw() - rule _returns() -> Annotation<'a> + rule _returns() -> Annotation<'input, 'a> = l:lit("->") e:expression() { make_annotation(l, e) } - rule function_def_raw() -> FunctionDef<'a> + rule function_def_raw() -> FunctionDef<'input, 'a> = def:lit("def") n:name() op:lit("(") params:params()? cp:lit(")") ty:_returns()? c:lit(":") b:block() { make_function_def(None, def, n, op, params, cp, ty, c, b) @@ -341,10 +346,10 @@ parser! { // Function parameters - rule params() -> Parameters<'a> + rule params() -> Parameters<'input, 'a> = parameters() - rule parameters() -> Parameters<'a> + rule parameters() -> Parameters<'input, 'a> = a:slash_no_default() b:param_no_default()* c:param_with_default()* d:star_etc()? { make_parameters(Some(a), concat(b, c), d) } @@ -361,7 +366,7 @@ parser! { make_parameters(None, vec![], Some(d)) } - rule slash_no_default() -> (Vec>, ParamSlash<'a>) + rule slash_no_default() -> (Vec>, ParamSlash<'input, 'a>) = a:param_no_default()+ slash:lit("/") com:comma() { (a, ParamSlash { comma: Some(com)}) } @@ -369,7 +374,7 @@ parser! { (a, ParamSlash { comma: None }) } - rule slash_with_default() -> (Vec>, ParamSlash<'a>) + rule slash_with_default() -> (Vec>, ParamSlash<'input, 'a>) = a:param_no_default()* b:param_with_default()+ slash:lit("/") c:comma() { (concat(a, b), ParamSlash { comma: Some(c) }) } @@ -377,7 +382,7 @@ parser! { (concat(a, b), ParamSlash { comma: None }) } - rule star_etc() -> StarEtc<'a> + rule star_etc() -> StarEtc<'input, 'a> = star:lit("*") a:param_no_default() b:param_maybe_default()* kw:kwds()? { StarEtc(Some(StarArg::Param(Box::new( add_param_star(a, star)))), b, kw) @@ -387,16 +392,16 @@ parser! { } / kw:kwds() { StarEtc(None, vec![], Some(kw)) } - rule kwds() -> Param<'a> + rule kwds() -> Param<'input, 'a> = star:lit("**") a:param_no_default() { add_param_star(a, star) } - rule param_no_default() -> Param<'a> + rule param_no_default() -> Param<'input, 'a> = a:param() c:lit(",") { add_param_default(a, None, Some(c)) } / a:param() &lit(")") {a} - rule param_with_default() -> Param<'a> + rule param_with_default() -> Param<'input, 'a> = a:param() def:default() c:lit(",") { add_param_default(a, Some(def), Some(c)) } @@ -404,7 +409,7 @@ parser! { add_param_default(a, Some(def), None) } - rule param_maybe_default() -> Param<'a> + rule param_maybe_default() -> Param<'input, 'a> = a:param() def:default()? c:lit(",") { add_param_default(a, def, Some(c)) } @@ -412,24 +417,24 @@ parser! { add_param_default(a, def, None) } - rule param() -> Param<'a> + rule param() -> Param<'input, 'a> = n:name() a:annotation()? { Param {name: n, annotation: a, ..Default::default() } } - rule annotation() -> Annotation<'a> + rule annotation() -> Annotation<'input, 'a> = col:lit(":") e:expression() { make_annotation(col, e) } - rule default() -> (AssignEqual<'a>, Expression<'a>) + rule default() -> (AssignEqual<'input, 'a>, Expression<'input, 'a>) = eq:lit("=") ex:expression() { (make_assign_equal(eq), ex) } // If statement - rule if_stmt() -> If<'a> + rule if_stmt() -> If<'input, 'a> = i:lit("if") a:named_expression() col:lit(":") b:block() elif:elif_stmt() { make_if(i, a, col, b, Some(OrElse::Elif(elif)), false) } @@ -437,7 +442,7 @@ parser! 
{ make_if(i, a, col, b, el.map(OrElse::Else), false) } - rule elif_stmt() -> If<'a> + rule elif_stmt() -> If<'input, 'a> = i:lit("elif") a:named_expression() col:lit(":") b:block() elif:elif_stmt() { make_if(i, a, col, b, Some(OrElse::Elif(elif)), true) } @@ -445,21 +450,21 @@ parser! { make_if(i, a, col, b, el.map(OrElse::Else), true) } - rule else_block() -> Else<'a> + rule else_block() -> Else<'input, 'a> = el:lit("else") col:lit(":") b:block() { make_else(el, col, b) } // While statement - rule while_stmt() -> While<'a> + rule while_stmt() -> While<'input, 'a> = kw:lit("while") test:named_expression() col:lit(":") b:block() el:else_block()? { make_while(kw, test, col, b, el) } // For statement - rule for_stmt() -> For<'a> + rule for_stmt() -> For<'input, 'a> = f:lit("for") t:star_targets() i:lit("in") it:star_expressions() c:lit(":") b:block() el:else_block()? { make_for(None, f, t, i, it, c, b, el) @@ -472,7 +477,7 @@ parser! { // With statement - rule with_stmt() -> With<'a> + rule with_stmt() -> With<'input, 'a> = kw:lit("with") l:lpar() items:separated_trailer(, ) r:rpar() col:lit(":") b:block() { make_with(None, kw, Some(l), comma_separate(items.0, items.1, items.2), Some(r), col, b) @@ -490,7 +495,7 @@ parser! { make_with(Some(asy), kw, None, comma_separate(items.0, items.1, None), None, col, b) } - rule with_item() -> WithItem<'a> + rule with_item() -> WithItem<'input, 'a> = e:expression() a:lit("as") t:star_target() &(lit(",") / lit(":")) { make_with_item(e, Some(a), Some(t)) } @@ -500,7 +505,7 @@ parser! { // Try statement - rule try_stmt() -> Try<'a> + rule try_stmt() -> Try<'input, 'a> = kw:lit("try") lit(":") b:block() f:finally_block() { make_try(kw, b, vec![], None, Some(f)) } @@ -510,7 +515,7 @@ parser! { } // Note: this is separate because TryStar is a different type in LibCST - rule try_star_stmt() -> TryStar<'a> + rule try_star_stmt() -> TryStar<'input, 'a> = kw:lit("try") lit(":") b:block() ex:except_star_block()+ el:else_block()? f:finally_block()? { make_try_star(kw, b, ex, el, f) @@ -518,7 +523,7 @@ parser! { // Except statement - rule except_block() -> ExceptHandler<'a> + rule except_block() -> ExceptHandler<'input, 'a> = kw:lit("except") e:expression() a:(k:lit("as") n:name() {(k, n)})? col:lit(":") b:block() { make_except(kw, Some(e), a, col, b) @@ -527,13 +532,13 @@ parser! { make_except(kw, None, None, col, b) } - rule except_star_block() -> ExceptStarHandler<'a> + rule except_star_block() -> ExceptStarHandler<'input, 'a> = kw:lit("except") star:lit("*") e:expression() a:(k:lit("as") n:name() {(k, n)})? col:lit(":") b:block() { make_except_star(kw, star, e, a, col, b) } - rule finally_block() -> Finally<'a> + rule finally_block() -> Finally<'input, 'a> = kw:lit("finally") col:lit(":") b:block() { make_finally(kw, col, b) } @@ -541,13 +546,13 @@ parser! { // Match statement - rule match_stmt() -> Match<'a> + rule match_stmt() -> Match<'input, 'a> = kw:lit("match") subject:subject_expr() col:lit(":") tok(NL, "NEWLINE") i:tok(Indent, "INDENT") cases:case_block()+ d:tok(Dedent, "DEDENT") { make_match(kw, subject, col, i, cases, d) } - rule subject_expr() -> Expression<'a> + rule subject_expr() -> Expression<'input, 'a> = first:star_named_expression() c:comma() rest:star_named_expressions()? { Expression::Tuple(Box::new( make_tuple_from_elements(first.with_comma(c), rest.unwrap_or_default())) @@ -555,35 +560,35 @@ parser! 
{ } / named_expression() - rule case_block() -> MatchCase<'a> + rule case_block() -> MatchCase<'input, 'a> = kw:lit("case") pattern:patterns() guard:guard()? col:lit(":") body:block() { make_case(kw, pattern, guard, col, body) } - rule guard() -> (TokenRef<'a>, Expression<'a>) + rule guard() -> (TokenRef<'input, 'a>, Expression<'input, 'a>) = kw:lit("if") exp:named_expression() { (kw, exp) } - rule patterns() -> MatchPattern<'a> + rule patterns() -> MatchPattern<'input, 'a> = pats:open_sequence_pattern() { MatchPattern::Sequence(make_list_pattern(None, pats, None)) } / pattern() - rule pattern() -> MatchPattern<'a> + rule pattern() -> MatchPattern<'input, 'a> = as_pattern() / or_pattern() - rule as_pattern() -> MatchPattern<'a> + rule as_pattern() -> MatchPattern<'input, 'a> = pat:or_pattern() kw:lit("as") target:pattern_capture_target() { make_as_pattern(Some(pat), Some(kw), Some(target)) } - rule or_pattern() -> MatchPattern<'a> + rule or_pattern() -> MatchPattern<'input, 'a> = pats:separated(, ) { make_or_pattern(pats.0, pats.1) } - rule closed_pattern() -> MatchPattern<'a> + rule closed_pattern() -> MatchPattern<'input, 'a> = literal_pattern() / capture_pattern() / wildcard_pattern() @@ -593,7 +598,7 @@ parser! { / mapping_pattern() / class_pattern() - rule literal_pattern() -> MatchPattern<'a> + rule literal_pattern() -> MatchPattern<'input, 'a> = val:signed_number() !(lit("+") / lit("-")) { make_match_value(val) } / val:complex_number() { make_match_value(val) } / val:strings() { make_match_value(val.into()) } @@ -601,7 +606,7 @@ parser! { / n:lit("True") { make_match_singleton(make_name(n)) } / n:lit("False") { make_match_singleton(make_name(n)) } - rule literal_expr() -> Expression<'a> + rule literal_expr() -> Expression<'input, 'a> = val:signed_number() !(lit("+") / lit("-")) { val } / val:complex_number() { val } / val:strings() { val.into() } @@ -609,59 +614,59 @@ parser! { / n:lit("True") { Expression::Name(Box::new(make_name(n))) } / n:lit("False") { Expression::Name(Box::new(make_name(n))) } - rule complex_number() -> Expression<'a> + rule complex_number() -> Expression<'input, 'a> = re:signed_real_number() op:(lit("+")/lit("-")) im:imaginary_number() {? make_binary_op(re, op, im).map_err(|_| "complex number") } - rule signed_number() -> Expression<'a> + rule signed_number() -> Expression<'input, 'a> = n:tok(Number, "number") { make_number(n) } / op:lit("-") n:tok(Number, "number") {? make_unary_op(op, make_number(n)).map_err(|_| "signed number") } - rule signed_real_number() -> Expression<'a> + rule signed_real_number() -> Expression<'input, 'a> = real_number() / op:lit("-") n:real_number() {? make_unary_op(op, n).map_err(|_| "signed real number") } - rule real_number() -> Expression<'a> + rule real_number() -> Expression<'input, 'a> = n:tok(Number, "number") {? ensure_real_number(n) } - rule imaginary_number() -> Expression<'a> + rule imaginary_number() -> Expression<'input, 'a> = n:tok(Number, "number") {? 
ensure_imaginary_number(n) } - rule capture_pattern() -> MatchPattern<'a> + rule capture_pattern() -> MatchPattern<'input, 'a> = t:pattern_capture_target() { make_as_pattern(None, None, Some(t)) } - rule pattern_capture_target() -> Name<'a> + rule pattern_capture_target() -> Name<'input, 'a> = !lit("_") n:name() !(lit(".") / lit("(") / lit("=")) { n } - rule wildcard_pattern() -> MatchPattern<'a> + rule wildcard_pattern() -> MatchPattern<'input, 'a> = lit("_") { make_as_pattern(None, None, None) } - rule value_pattern() -> MatchPattern<'a> + rule value_pattern() -> MatchPattern<'input, 'a> = v:attr() !(lit(".") / lit("(") / lit("=")) { make_match_value(v.into()) } // In upstream attr and name_or_attr are mutually recursive, but rust-peg // doesn't support this yet. - rule attr() -> NameOrAttribute<'a> + rule attr() -> NameOrAttribute<'input, 'a> = &(name() lit(".")) v:name_or_attr() { v } #[cache_left_rec] - rule name_or_attr() -> NameOrAttribute<'a> + rule name_or_attr() -> NameOrAttribute<'input, 'a> = val:name_or_attr() d:lit(".") attr:name() { NameOrAttribute::A(Box::new(make_attribute(val.into(), d, attr))) } / n:name() { NameOrAttribute::N(Box::new(n)) } - rule group_pattern() -> MatchPattern<'a> + rule group_pattern() -> MatchPattern<'input, 'a> = l:lpar() pat:pattern() r:rpar() { pat.with_parens(l, r) } - rule sequence_pattern() -> MatchPattern<'a> + rule sequence_pattern() -> MatchPattern<'input, 'a> = l:lbrak() pats:maybe_sequence_pattern()? r:rbrak() { MatchPattern::Sequence( make_list_pattern(Some(l), pats.unwrap_or_default(), Some(r)) @@ -671,17 +676,17 @@ parser! { MatchPattern::Sequence(make_tuple_pattern(l, pats.unwrap_or_default(), r)) } - rule open_sequence_pattern() -> Vec> + rule open_sequence_pattern() -> Vec> = pat:maybe_star_pattern() c:comma() pats:maybe_sequence_pattern()? { make_open_sequence_pattern(pat, c, pats.unwrap_or_default()) } - rule maybe_sequence_pattern() -> Vec> + rule maybe_sequence_pattern() -> Vec> = pats:separated_trailer(, ) { comma_separate(pats.0, pats.1, pats.2) } - rule maybe_star_pattern() -> StarrableMatchSequenceElement<'a> + rule maybe_star_pattern() -> StarrableMatchSequenceElement<'input, 'a> = s:star_pattern() { StarrableMatchSequenceElement::Starred(s) } / p:pattern() { StarrableMatchSequenceElement::Simple( @@ -689,11 +694,11 @@ parser! { ) } - rule star_pattern() -> MatchStar<'a> + rule star_pattern() -> MatchStar<'input, 'a> = star:lit("*") t:pattern_capture_target() {make_match_star(star, Some(t))} / star:lit("*") t:wildcard_pattern() { make_match_star(star, None) } - rule mapping_pattern() -> MatchPattern<'a> + rule mapping_pattern() -> MatchPattern<'input, 'a> = l:lbrace() r:rbrace() { make_match_mapping(l, vec![], None, None, None, None, r) } @@ -708,20 +713,20 @@ parser! 
{ make_match_mapping(l, items, trail, None, None, None, r) } - rule items_pattern() -> Vec> + rule items_pattern() -> Vec> = pats:separated(, ) { comma_separate(pats.0, pats.1, None) } - rule key_value_pattern() -> MatchMappingElement<'a> + rule key_value_pattern() -> MatchMappingElement<'input, 'a> = key:(literal_expr() / a:attr() {a.into()}) colon:lit(":") pat:pattern() { make_match_mapping_element(key, colon, pat) } - rule double_star_pattern() -> (TokenRef<'a>, Name<'a>) + rule double_star_pattern() -> (TokenRef<'input, 'a>, Name<'input, 'a>) = star:lit("**") n:pattern_capture_target() { (star, n) } - rule class_pattern() -> MatchPattern<'a> + rule class_pattern() -> MatchPattern<'input, 'a> = cls:name_or_attr() l:lit("(") r:lit(")") { make_class_pattern(cls, l, vec![], None, vec![], None, r) } @@ -736,17 +741,17 @@ parser! { make_class_pattern(cls, l, pats, Some(c), kwds, trail, r) } - rule positional_patterns() -> Vec> + rule positional_patterns() -> Vec> = pats:separated(, ) { comma_separate(pats.0, pats.1, None) } - rule keyword_patterns() -> Vec> + rule keyword_patterns() -> Vec> = pats:separated(, ) { comma_separate(pats.0, pats.1, None) } - rule keyword_pattern() -> MatchKeywordElement<'a> + rule keyword_pattern() -> MatchKeywordElement<'input, 'a> = arg:name() eq:lit("=") value:pattern() { make_match_keyword_element(arg, eq, value) } @@ -754,17 +759,17 @@ parser! { // Expressions #[cache] - rule expression() -> Expression<'a> + rule expression() -> Expression<'input, 'a> = _conditional_expression() / lambdef() - rule _conditional_expression() -> Expression<'a> + rule _conditional_expression() -> Expression<'input, 'a> = body:disjunction() i:lit("if") test:disjunction() e:lit("else") oe:expression() { Expression::IfExp(Box::new(make_ifexp(body, i, test, e, oe))) } / disjunction() - rule yield_expr() -> Expression<'a> + rule yield_expr() -> Expression<'input, 'a> = y:lit("yield") f:lit("from") a:expression() { Expression::Yield(Box::new(make_yield(y, Some(f), Some(a)))) } @@ -772,7 +777,7 @@ parser! { Expression::Yield(Box::new(make_yield(y, None, a))) } - rule star_expressions() -> Expression<'a> + rule star_expressions() -> Expression<'input, 'a> = first:star_expression() rest:(comma:comma() e:star_expression() { (comma, expr_to_element(e)) })+ comma:comma()? { @@ -784,45 +789,45 @@ parser! { / star_expression() #[cache] - rule star_expression() -> Expression<'a> + rule star_expression() -> Expression<'input, 'a> = star:lit("*") e:bitwise_or() { Expression::StarredElement(Box::new(make_starred_element(star, expr_to_element(e)))) } / expression() - rule star_named_expressions() -> Vec> + rule star_named_expressions() -> Vec> = exps:separated_trailer(, ) { comma_separate(exps.0, exps.1, exps.2) } - rule star_named_expression() -> Element<'a> + rule star_named_expression() -> Element<'input, 'a> = star:lit("*") e:bitwise_or() { Element::Starred(Box::new(make_starred_element(star, expr_to_element(e)))) } / e:named_expression() { expr_to_element(e) } - rule named_expression() -> Expression<'a> + rule named_expression() -> Expression<'input, 'a> = a:name() op:lit(":=") b:expression() { Expression::NamedExpr(Box::new(make_named_expr(a, op, b))) } / e:expression() !lit(":=") { e } #[cache] - rule disjunction() -> Expression<'a> + rule disjunction() -> Expression<'input, 'a> = a:conjunction() b:(or:lit("or") inner:conjunction() { (or, inner) })+ {? 
make_boolean_op(a, b).map_err(|e| "expected disjunction") } / conjunction() #[cache] - rule conjunction() -> Expression<'a> + rule conjunction() -> Expression<'input, 'a> = a:inversion() b:(and:lit("and") inner:inversion() { (and, inner) })+ {? make_boolean_op(a, b).map_err(|e| "expected conjunction") } / inversion() #[cache] - rule inversion() -> Expression<'a> + rule inversion() -> Expression<'input, 'a> = not:lit("not") a:inversion() {? make_unary_op(not, a).map_err(|e| "expected inversion") } @@ -831,14 +836,14 @@ parser! { // Comparison operators #[cache] - rule comparison() -> Expression<'a> + rule comparison() -> Expression<'input, 'a> = a:bitwise_or() b:compare_op_bitwise_or_pair()+ { make_comparison(a, b) } / bitwise_or() // This implementation diverges slightly from CPython (3.9) to avoid bloating // the parser cache and increase readability. #[cache] - rule compare_op_bitwise_or_pair() -> (CompOp<'a>, Expression<'a>) + rule compare_op_bitwise_or_pair() -> (CompOp<'input, 'a>, Expression<'input, 'a>) = _op_bitwise_or("==") / _op_bitwise_or("!=") // TODO: support barry_as_flufl / _op_bitwise_or("<=") @@ -850,14 +855,14 @@ parser! { / _op_bitwise_or2("is", "not") / _op_bitwise_or("is") - rule _op_bitwise_or(o: &'static str) -> (CompOp<'a>, Expression<'a>) + rule _op_bitwise_or(o: &'static str) -> (CompOp<'input, 'a>, Expression<'input, 'a>) = op:lit(o) e:bitwise_or() {? make_comparison_operator(op) .map(|op| (op, e)) .map_err(|_| "comparison") } - rule _op_bitwise_or2(first: &'static str, second: &'static str) -> (CompOp<'a>, Expression<'a>) + rule _op_bitwise_or2(first: &'static str, second: &'static str) -> (CompOp<'input, 'a>, Expression<'input, 'a>) = f:lit(first) s:lit(second) e:bitwise_or() {? make_comparison_operator_2(f, s) .map(|op| (op, e)) @@ -865,28 +870,28 @@ parser! { } #[cache_left_rec] - rule bitwise_or() -> Expression<'a> + rule bitwise_or() -> Expression<'input, 'a> = a:bitwise_or() op:lit("|") b:bitwise_xor() {? make_binary_op(a, op, b).map_err(|e| "expected bitwise_or") } / bitwise_xor() #[cache_left_rec] - rule bitwise_xor() -> Expression<'a> + rule bitwise_xor() -> Expression<'input, 'a> = a:bitwise_xor() op:lit("^") b:bitwise_and() {? make_binary_op(a, op, b).map_err(|e| "expected bitwise_xor") } / bitwise_and() #[cache_left_rec] - rule bitwise_and() -> Expression<'a> + rule bitwise_and() -> Expression<'input, 'a> = a:bitwise_and() op:lit("&") b:shift_expr() {? make_binary_op(a, op, b).map_err(|e| "expected bitwise_and") } / shift_expr() #[cache_left_rec] - rule shift_expr() -> Expression<'a> + rule shift_expr() -> Expression<'input, 'a> = a:shift_expr() op:lit("<<") b:sum() {? make_binary_op(a, op, b).map_err(|e| "expected shift_expr") } @@ -896,7 +901,7 @@ parser! { / sum() #[cache_left_rec] - rule sum() -> Expression<'a> + rule sum() -> Expression<'input, 'a> = a:sum() op:lit("+") b:term() {? make_binary_op(a, op, b).map_err(|e| "expected sum") } @@ -906,7 +911,7 @@ parser! { / term() #[cache_left_rec] - rule term() -> Expression<'a> + rule term() -> Expression<'input, 'a> = a:term() op:lit("*") b:factor() {? make_binary_op(a, op, b).map_err(|e| "expected term") } @@ -925,7 +930,7 @@ parser! { / factor() #[cache] - rule factor() -> Expression<'a> + rule factor() -> Expression<'input, 'a> = op:lit("+") a:factor() {? make_unary_op(op, a).map_err(|e| "expected factor") } @@ -937,7 +942,7 @@ parser! { } / power() - rule power() -> Expression<'a> + rule power() -> Expression<'input, 'a> = a:await_primary() op:lit("**") b:factor() {? 
make_binary_op(a, op, b).map_err(|e| "expected power") } @@ -945,14 +950,14 @@ parser! { // Primary elements - rule await_primary() -> Expression<'a> + rule await_primary() -> Expression<'input, 'a> = aw:tok(AWAIT, "AWAIT") e:primary() { Expression::Await(Box::new(make_await(aw, e))) } / primary() #[cache_left_rec] - rule primary() -> Expression<'a> + rule primary() -> Expression<'input, 'a> = v:primary() dot:lit(".") attr:name() { Expression::Attribute(Box::new(make_attribute(v, dot, attr))) } @@ -967,20 +972,20 @@ parser! { } / atom() - rule slices() -> Vec> + rule slices() -> Vec> = s:slice() !lit(",") { vec![SubscriptElement { slice: s, comma: None }] } / slices:separated_trailer(, ) { make_slices(slices.0, slices.1, slices.2) } - rule slice() -> BaseSlice<'a> + rule slice() -> BaseSlice<'input, 'a> = l:expression()? col:lit(":") u:expression()? rest:(c:lit(":") s:expression()? {(c, s)})? { make_slice(l, col, u, rest) } / v:expression() { make_index(v) } - rule atom() -> Expression<'a> + rule atom() -> Expression<'input, 'a> = n:name() { Expression::Name(Box::new(n)) } / n:lit("True") { Expression::Name(Box::new(make_name(n))) } / n:lit("False") { Expression::Name(Box::new(make_name(n))) } @@ -992,26 +997,26 @@ parser! { / &lit("{") e:(dict() / set() / dictcomp() / setcomp()) {e} / lit("...") { Expression::Ellipsis(Box::new(Ellipsis {lpar: vec![], rpar: vec![]}))} - rule group() -> Expression<'a> + rule group() -> Expression<'input, 'a> = lpar:lpar() e:(yield_expr() / named_expression()) rpar:rpar() { e.with_parens(lpar, rpar) } // Lambda functions - rule lambdef() -> Expression<'a> + rule lambdef() -> Expression<'input, 'a> = kw:lit("lambda") p:lambda_params()? c:lit(":") b:expression() { Expression::Lambda(Box::new(make_lambda(kw, p.unwrap_or_default(), c, b))) } - rule lambda_params() -> Parameters<'a> + rule lambda_params() -> Parameters<'input, 'a> = lambda_parameters() // lambda_parameters etc. duplicates parameters but without annotations or type // comments, and if there's no comma after a parameter, we expect a colon, not a // close parenthesis. - rule lambda_parameters() -> Parameters<'a> + rule lambda_parameters() -> Parameters<'input, 'a> = a:lambda_slash_no_default() b:lambda_param_no_default()* c:lambda_param_with_default()* d:lambda_star_etc()? { make_parameters(Some(a), concat(b, c), d) @@ -1031,7 +1036,7 @@ parser! { make_parameters(None, vec![], Some(d)) } - rule lambda_slash_no_default() -> (Vec>, ParamSlash<'a>) + rule lambda_slash_no_default() -> (Vec>, ParamSlash<'input, 'a>) = a:lambda_param_no_default()+ slash:lit("/") com:comma() { (a, ParamSlash { comma: Some(com) } ) } @@ -1039,7 +1044,7 @@ parser! { (a, ParamSlash { comma: None }) } - rule lambda_slash_with_default() -> (Vec>, ParamSlash<'a>) + rule lambda_slash_with_default() -> (Vec>, ParamSlash<'input, 'a>) = a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") c:comma(){ (concat(a, b), ParamSlash { comma: Some(c) }) } @@ -1047,7 +1052,7 @@ parser! { (concat(a, b), ParamSlash { comma: None }) } - rule lambda_star_etc() -> StarEtc<'a> + rule lambda_star_etc() -> StarEtc<'input, 'a> = star:lit("*") a:lambda_param_no_default() b:lambda_param_maybe_default()* kw:lambda_kwds()? { StarEtc(Some(StarArg::Param( @@ -1059,18 +1064,18 @@ parser! 
{ } / kw:lambda_kwds() { StarEtc(None, vec![], Some(kw)) } - rule lambda_kwds() -> Param<'a> + rule lambda_kwds() -> Param<'input, 'a> = star:lit("**") a:lambda_param_no_default() { add_param_star(a, star) } - rule lambda_param_no_default() -> Param<'a> + rule lambda_param_no_default() -> Param<'input, 'a> = a:lambda_param() c:lit(",") { add_param_default(a, None, Some(c)) } / a:lambda_param() &lit(":") {a} - rule lambda_param_with_default() -> Param<'a> + rule lambda_param_with_default() -> Param<'input, 'a> = a:lambda_param() def:default() c:lit(",") { add_param_default(a, Some(def), Some(c)) } @@ -1078,7 +1083,7 @@ parser! { add_param_default(a, Some(def), None) } - rule lambda_param_maybe_default() -> Param<'a> + rule lambda_param_maybe_default() -> Param<'input, 'a> = a:lambda_param() def:default()? c:lit(",") { add_param_default(a, def, Some(c)) } @@ -1086,25 +1091,25 @@ parser! { add_param_default(a, def, None) } - rule lambda_param() -> Param<'a> + rule lambda_param() -> Param<'input, 'a> = name:name() { Param { name, ..Default::default() } } // Literals - rule strings() -> String<'a> + rule strings() -> String<'input, 'a> = s:(str:tok(STRING, "STRING") t:&_ {(make_string(str), t)} / str:fstring() t:&_ {(String::Formatted(str), t)})+ { make_strings(s) } - rule list() -> Expression<'a> + rule list() -> Expression<'input, 'a> = lbrak:lbrak() e:star_named_expressions()? rbrak:rbrak() { Expression::List(Box::new( make_list(lbrak, e.unwrap_or_default(), rbrak)) ) } - rule tuple() -> Expression<'a> + rule tuple() -> Expression<'input, 'a> = lpar:lpar() first:star_named_expression() &lit(",") rest:(c:comma() e:star_named_expression() {(c, e)})* trailing_comma:comma()? rpar:rpar() { @@ -1114,42 +1119,42 @@ parser! { } / lpar:lpar() rpar:lit(")") { Expression::Tuple(Box::new(Tuple::default().with_parens( - lpar, RightParen { whitespace_before: Default::default(), rpar_tok: rpar } + lpar, RightParen { rpar_tok: rpar } )))} - rule set() -> Expression<'a> + rule set() -> Expression<'input, 'a> = lbrace:lbrace() e:star_named_expressions()? rbrace:rbrace() { Expression::Set(Box::new(make_set(lbrace, e.unwrap_or_default(), rbrace))) } // Dicts - rule dict() -> Expression<'a> + rule dict() -> Expression<'input, 'a> = lbrace:lbrace() els:double_starred_keypairs()? rbrace:rbrace() { Expression::Dict(Box::new(make_dict(lbrace, els.unwrap_or_default(), rbrace))) } - rule double_starred_keypairs() -> Vec> + rule double_starred_keypairs() -> Vec> = pairs:separated_trailer(, ) { make_double_starred_keypairs(pairs.0, pairs.1, pairs.2) } - rule double_starred_kvpair() -> DictElement<'a> + rule double_starred_kvpair() -> DictElement<'input, 'a> = s:lit("**") e:bitwise_or() { DictElement::Starred(make_double_starred_element(s, e)) } / k:kvpair() { make_dict_element(k) } - rule kvpair() -> (Expression<'a>, TokenRef<'a>, Expression<'a>) + rule kvpair() -> (Expression<'input, 'a>, TokenRef<'input, 'a>, Expression<'input, 'a>) = k:expression() colon:lit(":") v:expression() { (k, colon, v) } // Comprehensions & generators - rule for_if_clauses() -> CompFor<'a> + rule for_if_clauses() -> CompFor<'input, 'a> = c:for_if_clause()+ { merge_comp_fors(c) } - rule for_if_clause() -> CompFor<'a> + rule for_if_clause() -> CompFor<'input, 'a> = asy:_async() f:lit("for") tgt:star_targets() i:lit("in") iter:disjunction() ifs:_comp_if()* { make_for_if(Some(asy), f, tgt, i, iter, ifs) @@ -1159,42 +1164,42 @@ parser! 
{ make_for_if(None, f, tgt, i, iter, ifs) } - rule _comp_if() -> CompIf<'a> + rule _comp_if() -> CompIf<'input, 'a> = kw:lit("if") cond:disjunction() { make_comp_if(kw, cond) } - rule listcomp() -> Expression<'a> + rule listcomp() -> Expression<'input, 'a> = lbrak:lbrak() elt:named_expression() comp:for_if_clauses() rbrak:rbrak() { Expression::ListComp(Box::new(make_list_comp(lbrak, elt, comp, rbrak))) } - rule setcomp() -> Expression<'a> + rule setcomp() -> Expression<'input, 'a> = l:lbrace() elt:named_expression() comp:for_if_clauses() r:rbrace() { Expression::SetComp(Box::new(make_set_comp(l, elt, comp, r))) } - rule genexp() -> GeneratorExp<'a> + rule genexp() -> GeneratorExp<'input, 'a> = lpar:lpar() g:_bare_genexp() rpar:rpar() { g.with_parens(lpar, rpar) } - rule _bare_genexp() -> GeneratorExp<'a> + rule _bare_genexp() -> GeneratorExp<'input, 'a> = elt:named_expression() comp:for_if_clauses() { make_bare_genexp(elt, comp) } - rule dictcomp() -> Expression<'a> + rule dictcomp() -> Expression<'input, 'a> = lbrace:lbrace() elt:kvpair() comp:for_if_clauses() rbrace:rbrace() { Expression::DictComp(Box::new(make_dict_comp(lbrace, elt, comp, rbrace))) } // Function call arguments - rule arguments() -> Vec> + rule arguments() -> Vec> = a:args() trail:comma()? &lit(")") {add_arguments_trailing_comma(a, trail)} - rule args() -> Vec> + rule args() -> Vec> = first:_posarg() rest:(c:comma() a:_posarg() {(c, a)})* kw:(c:comma() k:kwargs() {(c, k)})? { @@ -1206,11 +1211,11 @@ parser! { } / kwargs() - rule _posarg() -> Arg<'a> + rule _posarg() -> Arg<'input, 'a> = a:(starred_expression() / e:named_expression() { make_arg(e) }) !lit("=") { a } - rule kwargs() -> Vec> + rule kwargs() -> Vec> = sitems:separated(, ) scomma:comma() ditems:separated(, ) { @@ -1226,18 +1231,18 @@ parser! { comma_separate(items.0, items.1, None) } - rule starred_expression() -> Arg<'a> + rule starred_expression() -> Arg<'input, 'a> = star:lit("*") e:expression() { make_star_arg(star, e) } - rule kwarg_or_starred() -> Arg<'a> + rule kwarg_or_starred() -> Arg<'input, 'a> = _kwarg() / starred_expression() - rule kwarg_or_double_starred() -> Arg<'a> + rule kwarg_or_double_starred() -> Arg<'input, 'a> = _kwarg() / star:lit("**") e:expression() { make_star_arg(star, e) } - rule _kwarg() -> Arg<'a> + rule _kwarg() -> Arg<'input, 'a> = n:name() eq:lit("=") v:expression() { make_kwarg(n, eq, v) } @@ -1245,7 +1250,7 @@ parser! { // Assignment targets // Generic targets - rule star_targets() -> AssignTargetExpression<'a> + rule star_targets() -> AssignTargetExpression<'input, 'a> = a:star_target() !lit(",") {a} / targets:separated_trailer(, ) { AssignTargetExpression::Tuple(Box::new( @@ -1253,14 +1258,14 @@ parser! { )) } - rule star_targets_list_seq() -> Vec> + rule star_targets_list_seq() -> Vec> = targets:separated_trailer(, ) { comma_separate(targets.0, targets.1, targets.2) } // This differs from star_targets below because it requires at least two items // in the tuple - rule star_targets_tuple_seq() -> Tuple<'a> + rule star_targets_tuple_seq() -> Tuple<'input, 'a> = first:(t:star_target() {assign_target_to_element(t)}) rest:(c:comma() t:star_target() {(c, assign_target_to_element(t))})+ trail:comma()? { @@ -1271,7 +1276,7 @@ parser! 
{ } #[cache] - rule star_target() -> AssignTargetExpression<'a> + rule star_target() -> AssignTargetExpression<'input, 'a> = star:lit("*") !lit("*") t:star_target() { AssignTargetExpression::StarredElement(Box::new( make_starred_element(star, assign_target_to_element(t)) @@ -1280,7 +1285,7 @@ parser! { / target_with_star_atom() #[cache] - rule target_with_star_atom() -> AssignTargetExpression<'a> + rule target_with_star_atom() -> AssignTargetExpression<'input, 'a> = a:t_primary() dot:lit(".") n:name() !t_lookahead() { AssignTargetExpression::Attribute(Box::new(make_attribute(a, dot, n))) } @@ -1291,7 +1296,7 @@ parser! { } / a:star_atom() {a} - rule star_atom() -> AssignTargetExpression<'a> + rule star_atom() -> AssignTargetExpression<'input, 'a> = a:name() { AssignTargetExpression::Name(Box::new(a)) } / lpar:lpar() a:target_with_star_atom() rpar:rpar() { a.with_parens(lpar, rpar) } / lpar:lpar() a:star_targets_tuple_seq()? rpar:rpar() { @@ -1305,12 +1310,12 @@ parser! { )) } - rule single_target() -> AssignTargetExpression<'a> + rule single_target() -> AssignTargetExpression<'input, 'a> = single_subscript_attribute_target() / n:name() { AssignTargetExpression::Name(Box::new(n)) } / lpar:lpar() t:single_target() rpar:rpar() { t.with_parens(lpar, rpar) } - rule single_subscript_attribute_target() -> AssignTargetExpression<'a> + rule single_subscript_attribute_target() -> AssignTargetExpression<'input, 'a> = a:t_primary() dot:lit(".") n:name() !t_lookahead() { AssignTargetExpression::Attribute(Box::new(make_attribute(a, dot, n))) } @@ -1322,7 +1327,7 @@ parser! { #[cache_left_rec] - rule t_primary() -> Expression<'a> + rule t_primary() -> Expression<'input, 'a> = value:t_primary() dot:lit(".") attr:name() &t_lookahead() { Expression::Attribute(Box::new(make_attribute(value, dot, attr))) } @@ -1342,12 +1347,12 @@ parser! { // Targets for del statements - rule del_targets() -> Vec> + rule del_targets() -> Vec> = t:separated_trailer(, ) { comma_separate(t.0, t.1, t.2) } - rule del_target() -> DelTargetExpression<'a> + rule del_target() -> DelTargetExpression<'input, 'a> = a:t_primary() d:lit(".") n:name() !t_lookahead() { DelTargetExpression::Attribute(Box::new(make_attribute(a, d, n))) } @@ -1358,7 +1363,7 @@ parser! { } / del_t_atom() - rule del_t_atom() -> DelTargetExpression<'a> + rule del_t_atom() -> DelTargetExpression<'input, 'a> = n:name() { DelTargetExpression::Name(Box::new(n)) } / l:lpar() d:del_target() r:rpar() { d.with_parens(l, r) } / l:lpar() d:del_targets()? r:rpar() { @@ -1372,19 +1377,19 @@ parser! { // F-strings - rule fstring() -> FormattedString<'a> + rule fstring() -> FormattedString<'input, 'a> = start:tok(FStringStart, "f\"") parts:(_f_string() / _f_replacement())* end:tok(FStringEnd, "\"") { make_fstring(start.string, parts, end.string) } - rule _f_string() -> FormattedStringContent<'a> + rule _f_string() -> FormattedStringContent<'input, 'a> = t:tok(FStringString, "f-string contents") { - FormattedStringContent::Text(FormattedStringText { value: t.string }) + FormattedStringContent::Text(make_fstringtext(t.string)) } - rule _f_replacement() -> FormattedStringContent<'a> + rule _f_replacement() -> FormattedStringContent<'input, 'a> = lb:lit("{") e:_f_expr() eq:lit("=")? conv:(t:lit("!") c:_f_conversion() {(t,c)})? spec:(t:lit(":") s:_f_spec() {(t,s)})? @@ -1394,7 +1399,7 @@ parser! 
{ )) } - rule _f_expr() -> Expression<'a> + rule _f_expr() -> Expression<'input, 'a> = (g:_bare_genexp() {Expression::GeneratorExp(Box::new(g))}) / star_expressions() / yield_expr() @@ -1402,49 +1407,49 @@ parser! { rule _f_conversion() -> &'a str = lit("r") {"r"} / lit("s") {"s"} / lit("a") {"a"} - rule _f_spec() -> Vec> + rule _f_spec() -> Vec> = (_f_string() / _f_replacement())* // CST helpers - rule comma() -> Comma<'a> + rule comma() -> Comma<'input, 'a> = c:lit(",") { make_comma(c) } - rule dots() -> Vec> + rule dots() -> Vec> = ds:((dot:lit(".") { make_dot(dot) })+ / tok:lit("...") { - vec![make_dot(tok.clone()), make_dot(tok.clone()), make_dot(tok.clone())]} + vec![make_dot(tok), make_dot(tok), make_dot(tok)]} )+ { ds.into_iter().flatten().collect() } - rule lpar() -> LeftParen<'a> + rule lpar() -> LeftParen<'input, 'a> = a:lit("(") { make_lpar(a) } - rule rpar() -> RightParen<'a> + rule rpar() -> RightParen<'input, 'a> = a:lit(")") { make_rpar(a) } - rule lbrak() -> LeftSquareBracket<'a> + rule lbrak() -> LeftSquareBracket<'input, 'a> = tok:lit("[") { make_left_bracket(tok) } - rule rbrak() -> RightSquareBracket<'a> + rule rbrak() -> RightSquareBracket<'input, 'a> = tok:lit("]") { make_right_bracket(tok) } - rule lbrace() -> LeftCurlyBrace<'a> + rule lbrace() -> LeftCurlyBrace<'input, 'a> = tok:lit("{") { make_left_brace(tok) } - rule rbrace() -> RightCurlyBrace<'a> + rule rbrace() -> RightCurlyBrace<'input, 'a> = tok:lit("}") { make_right_brace(tok) } /// matches any token, not just whitespace - rule _() -> TokenRef<'a> + rule _() -> TokenRef<'input, 'a> = [t] { t } - rule lit(lit: &'static str) -> TokenRef<'a> + rule lit(lit: &'static str) -> TokenRef<'input, 'a> = [t] {? if t.string == lit { Ok(t) } else { Err(lit) } } - rule tok(tok: TokType, err: &'static str) -> TokenRef<'a> + rule tok(tok: TokType, err: &'static str) -> TokenRef<'input, 'a> = [t] {? if t.r#type == tok { Ok(t) } else { Err(err) } } - rule name() -> Name<'a> + rule name() -> Name<'input, 'a> = !( lit("False") / lit("None") / lit("True") / lit("and") / lit("as") / lit("assert") / lit("async") / lit("await") / lit("break") / lit("class") / lit("continue") / lit("def") / lit("del") / lit("elif") / lit("else") / lit("except") / lit("finally") / lit("for") / lit("from") / lit("global") / lit("if") / lit("import") @@ -1453,7 +1458,7 @@ parser! { ) t:tok(NameTok, "NAME") {make_name(t)} - rule _async() -> TokenRef<'a> + rule _async() -> TokenRef<'input, 'a> = tok(Async, "ASYNC") rule separated_trailer(el: rule, sep: rule) -> (El, Vec<(Sep, El)>, Option) @@ -1481,20 +1486,18 @@ parser! 
{ } #[allow(clippy::too_many_arguments)] -fn make_function_def<'a>( - async_tok: Option>, - def_tok: TokenRef<'a>, - name: Name<'a>, - open_paren_tok: TokenRef<'a>, - params: Option>, - close_paren_tok: TokenRef<'a>, - returns: Option>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> FunctionDef<'a> { - let asynchronous = async_tok.as_ref().map(|_| Asynchronous { - whitespace_after: Default::default(), - }); +fn make_function_def<'input, 'a>( + async_tok: Option>, + def_tok: TokenRef<'input, 'a>, + name: Name<'input, 'a>, + open_paren_tok: TokenRef<'input, 'a>, + params: Option>, + close_paren_tok: TokenRef<'input, 'a>, + returns: Option>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> FunctionDef<'input, 'a> { + let asynchronous = async_tok.as_ref().map(|_| make_async()); FunctionDef { name, params: params.unwrap_or_default(), @@ -1502,12 +1505,6 @@ fn make_function_def<'a>( decorators: Default::default(), returns, asynchronous, - leading_lines: Default::default(), - lines_after_decorators: vec![], - whitespace_after_def: Default::default(), - whitespace_after_name: Default::default(), - whitespace_before_colon: Default::default(), - whitespace_before_params: Default::default(), async_tok, def_tok, open_paren_tok, @@ -1516,25 +1513,22 @@ fn make_function_def<'a>( } } -fn make_decorator<'a>( - at_tok: TokenRef<'a>, - name: Expression<'a>, - newline_tok: TokenRef<'a>, -) -> Decorator<'a> { +fn make_decorator<'input, 'a>( + at_tok: TokenRef<'input, 'a>, + name: Expression<'input, 'a>, + newline_tok: TokenRef<'input, 'a>, +) -> Decorator<'input, 'a> { Decorator { decorator: name, - leading_lines: Default::default(), - whitespace_after_at: Default::default(), - trailing_whitespace: Default::default(), newline_tok, at_tok, } } -fn make_comparison<'a>( - head: Expression<'a>, - tail: Vec<(CompOp<'a>, Expression<'a>)>, -) -> Expression<'a> { +fn make_comparison<'input, 'a>( + head: Expression<'input, 'a>, + tail: Vec<(CompOp<'input, 'a>, Expression<'input, 'a>)>, +) -> Expression<'input, 'a> { let mut comparisons = vec![]; for (operator, e) in tail { comparisons.push(ComparisonTarget { @@ -1550,74 +1544,32 @@ fn make_comparison<'a>( })) } -fn make_comparison_operator(tok: TokenRef) -> Result { - let whitespace_before = Default::default(); - let whitespace_after = Default::default(); +fn make_comparison_operator<'input, 'a>( + tok: TokenRef<'input, 'a>, +) -> Result<'a, CompOp<'input, 'a>> { match tok.string { - "<" => Ok(CompOp::LessThan { - whitespace_after, - whitespace_before, - tok, - }), - ">" => Ok(CompOp::GreaterThan { - whitespace_after, - whitespace_before, - tok, - }), - "<=" => Ok(CompOp::LessThanEqual { - whitespace_after, - whitespace_before, - tok, - }), - ">=" => Ok(CompOp::GreaterThanEqual { - whitespace_after, - whitespace_before, - tok, - }), - "==" => Ok(CompOp::Equal { - whitespace_after, - whitespace_before, - tok, - }), - "!=" => Ok(CompOp::NotEqual { - whitespace_after, - whitespace_before, - tok, - }), - "in" => Ok(CompOp::In { - whitespace_after, - whitespace_before, - tok, - }), - "is" => Ok(CompOp::Is { - whitespace_after, - whitespace_before, - tok, - }), + "<" => Ok(CompOp::LessThan { tok }), + ">" => Ok(CompOp::GreaterThan { tok }), + "<=" => Ok(CompOp::LessThanEqual { tok }), + ">=" => Ok(CompOp::GreaterThanEqual { tok }), + "==" => Ok(CompOp::Equal { tok }), + "!=" => Ok(CompOp::NotEqual { tok }), + "in" => Ok(CompOp::In { tok }), + "is" => Ok(CompOp::Is { tok }), _ => Err(ParserError::OperatorError), } } -fn make_comparison_operator_2<'a>( 
- first: TokenRef<'a>, - second: TokenRef<'a>, -) -> Result<'a, CompOp<'a>> { - let whitespace_before = Default::default(); - let whitespace_between = Default::default(); - let whitespace_after = Default::default(); - +fn make_comparison_operator_2<'input, 'a>( + first: TokenRef<'input, 'a>, + second: TokenRef<'input, 'a>, +) -> Result<'a, CompOp<'input, 'a>> { match (first.string, second.string) { ("is", "not") => Ok(CompOp::IsNot { - whitespace_before, - whitespace_between, - whitespace_after, is_tok: first, not_tok: second, }), ("not", "in") => Ok(CompOp::NotIn { - whitespace_before, - whitespace_between, - whitespace_after, not_tok: first, in_tok: second, }), @@ -1625,10 +1577,10 @@ fn make_comparison_operator_2<'a>( } } -fn make_boolean_op<'a>( - head: Expression<'a>, - tail: Vec<(TokenRef<'a>, Expression<'a>)>, -) -> Result<'a, Expression<'a>> { +fn make_boolean_op<'input, 'a>( + head: Expression<'input, 'a>, + tail: Vec<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, +) -> Result<'a, Expression<'input, 'a>> { if tail.is_empty() { return Ok(head); } @@ -1646,29 +1598,21 @@ fn make_boolean_op<'a>( Ok(expr) } -fn make_boolean_operator(tok: TokenRef) -> Result { - let whitespace_before = Default::default(); - let whitespace_after = Default::default(); +fn make_boolean_operator<'input, 'a>( + tok: TokenRef<'input, 'a>, +) -> Result<'a, BooleanOp<'input, 'a>> { match tok.string { - "and" => Ok(BooleanOp::And { - whitespace_after, - whitespace_before, - tok, - }), - "or" => Ok(BooleanOp::Or { - whitespace_after, - whitespace_before, - tok, - }), + "and" => Ok(BooleanOp::And { tok }), + "or" => Ok(BooleanOp::Or { tok }), _ => Err(ParserError::OperatorError), } } -fn make_binary_op<'a>( - left: Expression<'a>, - op: TokenRef<'a>, - right: Expression<'a>, -) -> Result<'a, Expression<'a>> { +fn make_binary_op<'input, 'a>( + left: Expression<'input, 'a>, + op: TokenRef<'input, 'a>, + right: Expression<'input, 'a>, +) -> Result<'a, Expression<'input, 'a>> { let operator = make_binary_operator(op)?; Ok(Expression::BinaryOperation(Box::new(BinaryOperation { left: Box::new(left), @@ -1679,81 +1623,29 @@ fn make_binary_op<'a>( }))) } -fn make_binary_operator(tok: TokenRef) -> Result { - let whitespace_before = Default::default(); - let whitespace_after = Default::default(); - +fn make_binary_operator<'input, 'a>(tok: TokenRef<'input, 'a>) -> Result<'a, BinaryOp<'input, 'a>> { match tok.string { - "+" => Ok(BinaryOp::Add { - whitespace_after, - whitespace_before, - tok, - }), - "-" => Ok(BinaryOp::Subtract { - whitespace_after, - whitespace_before, - tok, - }), - "*" => Ok(BinaryOp::Multiply { - whitespace_after, - whitespace_before, - tok, - }), - "/" => Ok(BinaryOp::Divide { - whitespace_after, - whitespace_before, - tok, - }), - "//" => Ok(BinaryOp::FloorDivide { - whitespace_after, - whitespace_before, - tok, - }), - "%" => Ok(BinaryOp::Modulo { - whitespace_after, - whitespace_before, - tok, - }), - "**" => Ok(BinaryOp::Power { - whitespace_after, - whitespace_before, - tok, - }), - "<<" => Ok(BinaryOp::LeftShift { - whitespace_after, - whitespace_before, - tok, - }), - ">>" => Ok(BinaryOp::RightShift { - whitespace_after, - whitespace_before, - tok, - }), - "|" => Ok(BinaryOp::BitOr { - whitespace_after, - whitespace_before, - tok, - }), - "&" => Ok(BinaryOp::BitAnd { - whitespace_after, - whitespace_before, - tok, - }), - "^" => Ok(BinaryOp::BitXor { - whitespace_after, - whitespace_before, - tok, - }), - "@" => Ok(BinaryOp::MatrixMultiply { - whitespace_after, - whitespace_before, - tok, - 
}), + "+" => Ok(BinaryOp::Add { tok }), + "-" => Ok(BinaryOp::Subtract { tok }), + "*" => Ok(BinaryOp::Multiply { tok }), + "/" => Ok(BinaryOp::Divide { tok }), + "//" => Ok(BinaryOp::FloorDivide { tok }), + "%" => Ok(BinaryOp::Modulo { tok }), + "**" => Ok(BinaryOp::Power { tok }), + "<<" => Ok(BinaryOp::LeftShift { tok }), + ">>" => Ok(BinaryOp::RightShift { tok }), + "|" => Ok(BinaryOp::BitOr { tok }), + "&" => Ok(BinaryOp::BitAnd { tok }), + "^" => Ok(BinaryOp::BitXor { tok }), + "@" => Ok(BinaryOp::MatrixMultiply { tok }), _ => Err(ParserError::OperatorError), } } -fn make_unary_op<'a>(op: TokenRef<'a>, tail: Expression<'a>) -> Result<'a, Expression<'a>> { +fn make_unary_op<'input, 'a>( + op: TokenRef<'input, 'a>, + tail: Expression<'input, 'a>, +) -> Result<'a, Expression<'input, 'a>> { let operator = make_unary_operator(op)?; Ok(Expression::UnaryOperation(Box::new(UnaryOperation { operator, @@ -1763,69 +1655,54 @@ fn make_unary_op<'a>(op: TokenRef<'a>, tail: Expression<'a>) -> Result<'a, Expre }))) } -fn make_unary_operator(tok: TokenRef) -> Result { - let whitespace_after = Default::default(); +fn make_unary_operator<'input, 'a>(tok: TokenRef<'input, 'a>) -> Result<'a, UnaryOp<'input, 'a>> { match tok.string { - "+" => Ok(UnaryOp::Plus { - whitespace_after, - tok, - }), - "-" => Ok(UnaryOp::Minus { - whitespace_after, - tok, - }), - "~" => Ok(UnaryOp::BitInvert { - whitespace_after, - tok, - }), - "not" => Ok(UnaryOp::Not { - whitespace_after, - tok, - }), + "+" => Ok(UnaryOp::Plus { tok }), + "-" => Ok(UnaryOp::Minus { tok }), + "~" => Ok(UnaryOp::BitInvert { tok }), + "not" => Ok(UnaryOp::Not { tok }), _ => Err(ParserError::OperatorError), } } -fn make_number(num: TokenRef) -> Expression { +fn make_number<'input, 'a>(num: TokenRef<'input, 'a>) -> Expression<'input, 'a> { super::numbers::parse_number(num.string) } -fn make_indented_block<'a>( - nl: TokenRef<'a>, - indent: TokenRef<'a>, - statements: Vec>, - dedent: TokenRef<'a>, -) -> Suite<'a> { +fn make_indented_block<'input, 'a>( + nl: TokenRef<'input, 'a>, + indent: TokenRef<'input, 'a>, + statements: Vec>, + dedent: TokenRef<'input, 'a>, +) -> Suite<'input, 'a> { Suite::IndentedBlock(IndentedBlock { body: statements, - header: Default::default(), indent: Default::default(), - footer: Default::default(), newline_tok: nl, indent_tok: indent, dedent_tok: dedent, }) } -struct SimpleStatementParts<'a> { - first_tok: TokenRef<'a>, // The first token of the first statement. Used for its whitespace - first_statement: SmallStatement<'a>, - rest: Vec<(TokenRef<'a>, SmallStatement<'a>)>, // semicolon, statement pairs - last_semi: Option>, - nl: TokenRef<'a>, +struct SimpleStatementParts<'input, 'a> { + first_tok: TokenRef<'input, 'a>, // The first token of the first statement. 
Used for its whitespace + first_statement: SmallStatement<'input, 'a>, + rest: Vec<(TokenRef<'input, 'a>, SmallStatement<'input, 'a>)>, // semicolon, statement pairs + last_semi: Option>, + nl: TokenRef<'input, 'a>, } -fn make_semicolon(tok: TokenRef) -> Semicolon { - Semicolon { - whitespace_before: Default::default(), - whitespace_after: Default::default(), - tok, - } +fn make_semicolon<'input, 'a>(tok: TokenRef<'input, 'a>) -> Semicolon<'input, 'a> { + Semicolon { tok } } -fn _make_simple_statement( - parts: SimpleStatementParts, -) -> (TokenRef, Vec, TokenRef) { +fn _make_simple_statement<'input, 'a>( + parts: SimpleStatementParts<'input, 'a>, +) -> ( + TokenRef<'input, 'a>, + Vec>, + TokenRef<'input, 'a>, +) { let mut body = vec![]; let mut current = parts.first_statement; @@ -1841,42 +1718,39 @@ fn _make_simple_statement( (parts.first_tok, body, parts.nl) } -fn make_simple_statement_suite(parts: SimpleStatementParts) -> Suite { +fn make_simple_statement_suite<'input, 'a>( + parts: SimpleStatementParts<'input, 'a>, +) -> Suite<'input, 'a> { let (first_tok, body, newline_tok) = _make_simple_statement(parts); Suite::SimpleStatementSuite(SimpleStatementSuite { body, - leading_whitespace: Default::default(), - trailing_whitespace: Default::default(), first_tok, newline_tok, }) } -fn make_simple_statement_line(parts: SimpleStatementParts) -> SimpleStatementLine { +fn make_simple_statement_line<'input, 'a>( + parts: SimpleStatementParts<'input, 'a>, +) -> SimpleStatementLine<'input, 'a> { let (first_tok, body, newline_tok) = _make_simple_statement(parts); SimpleStatementLine { body, - leading_lines: Default::default(), - trailing_whitespace: Default::default(), first_tok, newline_tok, } } -fn make_if<'a>( - if_tok: TokenRef<'a>, - cond: Expression<'a>, - colon_tok: TokenRef<'a>, - block: Suite<'a>, - orelse: Option>, +fn make_if<'input, 'a>( + if_tok: TokenRef<'input, 'a>, + cond: Expression<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + block: Suite<'input, 'a>, + orelse: Option>, is_elif: bool, -) -> If<'a> { +) -> If<'input, 'a> { If { - leading_lines: Default::default(), - whitespace_before_test: Default::default(), test: cond, - whitespace_after_test: Default::default(), body: block, orelse: orelse.map(Box::new), is_elif, @@ -1885,23 +1759,29 @@ fn make_if<'a>( } } -fn make_else<'a>(else_tok: TokenRef<'a>, colon_tok: TokenRef<'a>, block: Suite<'a>) -> Else<'a> { +fn make_else<'input, 'a>( + else_tok: TokenRef<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + block: Suite<'input, 'a>, +) -> Else<'input, 'a> { Else { - leading_lines: Default::default(), - whitespace_before_colon: Default::default(), body: block, else_tok, colon_tok, } } -struct StarEtc<'a>(Option>, Vec>, Option>); +struct StarEtc<'input, 'a>( + Option>, + Vec>, + Option>, +); -fn make_parameters<'a>( - posonly: Option<(Vec>, ParamSlash<'a>)>, - params: Vec>, - star_etc: Option>, -) -> Parameters<'a> { +fn make_parameters<'input, 'a>( + posonly: Option<(Vec>, ParamSlash<'input, 'a>)>, + params: Vec>, + star_etc: Option>, +) -> Parameters<'input, 'a> { let (posonly_params, posonly_ind) = match posonly { Some((a, b)) => (a, Some(b)), None => (vec![], None), @@ -1920,11 +1800,11 @@ fn make_parameters<'a>( } } -fn add_param_default<'a>( - param: Param<'a>, - def: Option<(AssignEqual<'a>, Expression<'a>)>, - comma_tok: Option>, -) -> Param<'a> { +fn add_param_default<'input, 'a>( + param: Param<'input, 'a>, + def: Option<(AssignEqual<'input, 'a>, Expression<'input, 'a>)>, + comma_tok: Option>, +) -> Param<'input, 'a> { let 
comma = comma_tok.map(make_comma); let (equal, default) = match def { @@ -1939,7 +1819,10 @@ fn add_param_default<'a>( } } -fn add_param_star<'a>(param: Param<'a>, star: TokenRef<'a>) -> Param<'a> { +fn add_param_star<'input, 'a>( + param: Param<'input, 'a>, + star: TokenRef<'input, 'a>, +) -> Param<'input, 'a> { let str = star.string; Param { star: Some(str), @@ -1948,30 +1831,22 @@ fn add_param_star<'a>(param: Param<'a>, star: TokenRef<'a>) -> Param<'a> { } } -fn make_assign_equal(tok: TokenRef) -> AssignEqual { - AssignEqual { - whitespace_before: Default::default(), - whitespace_after: Default::default(), - tok, - } +fn make_assign_equal<'input, 'a>(tok: TokenRef<'input, 'a>) -> AssignEqual<'input, 'a> { + AssignEqual { tok } } -fn make_comma(tok: TokenRef) -> Comma { - Comma { - whitespace_before: Default::default(), - whitespace_after: Default::default(), - tok, - } +fn make_comma<'input, 'a>(tok: TokenRef<'input, 'a>) -> Comma<'input, 'a> { + Comma { tok } } fn concat(a: Vec, b: Vec) -> Vec { a.into_iter().chain(b.into_iter()).collect() } -fn make_name_or_attr<'a>( - first_tok: Name<'a>, - mut tail: Vec<(TokenRef<'a>, Name<'a>)>, -) -> NameOrAttribute<'a> { +fn make_name_or_attr<'input, 'a>( + first_tok: Name<'input, 'a>, + mut tail: Vec<(TokenRef<'input, 'a>, Name<'input, 'a>)>, +) -> NameOrAttribute<'input, 'a> { if let Some((dot, name)) = tail.pop() { let dot = make_dot(dot); return NameOrAttribute::A(Box::new(Attribute { @@ -1986,25 +1861,21 @@ fn make_name_or_attr<'a>( } } -fn make_name(tok: TokenRef) -> Name { +fn make_name<'input, 'a>(tok: TokenRef<'input, 'a>) -> Name<'input, 'a> { Name { value: tok.string, ..Default::default() } } -fn make_dot(tok: TokenRef) -> Dot { - Dot { - whitespace_before: Default::default(), - whitespace_after: Default::default(), - tok, - } +fn make_dot<'input, 'a>(tok: TokenRef<'input, 'a>) -> Dot<'input, 'a> { + Dot { tok } } -fn make_import_alias<'a>( - name: NameOrAttribute<'a>, - asname: Option<(TokenRef<'a>, Name<'a>)>, -) -> ImportAlias<'a> { +fn make_import_alias<'input, 'a>( + name: NameOrAttribute<'input, 'a>, + asname: Option<(TokenRef<'input, 'a>, Name<'input, 'a>)>, +) -> ImportAlias<'input, 'a> { ImportAlias { name, asname: asname.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))), @@ -2012,28 +1883,26 @@ fn make_import_alias<'a>( } } -fn make_as_name<'a>(as_tok: TokenRef<'a>, name: AssignTargetExpression<'a>) -> AsName<'a> { - AsName { - name, - whitespace_before_as: Default::default(), - whitespace_after_as: Default::default(), - as_tok, - } +fn make_as_name<'input, 'a>( + as_tok: TokenRef<'input, 'a>, + name: AssignTargetExpression<'input, 'a>, +) -> AsName<'input, 'a> { + AsName { name, as_tok } } -type ParenthesizedImportNames<'a> = ( - Option>, - ImportNames<'a>, - Option>, +type ParenthesizedImportNames<'input, 'a> = ( + Option>, + ImportNames<'input, 'a>, + Option>, ); -fn make_import_from<'a>( - from_tok: TokenRef<'a>, - dots: Vec>, - module: Option>, - import_tok: TokenRef<'a>, - aliases: ParenthesizedImportNames<'a>, -) -> ImportFrom<'a> { +fn make_import_from<'input, 'a>( + from_tok: TokenRef<'input, 'a>, + dots: Vec>, + module: Option>, + import_tok: TokenRef<'input, 'a>, + aliases: ParenthesizedImportNames<'input, 'a>, +) -> ImportFrom<'input, 'a> { let (lpar, names, rpar) = aliases; ImportFrom { @@ -2043,27 +1912,26 @@ fn make_import_from<'a>( lpar, rpar, semicolon: None, - whitespace_after_from: Default::default(), - whitespace_after_import: Default::default(), - whitespace_before_import: 
Default::default(), from_tok, import_tok, } } -fn make_import<'a>(import_tok: TokenRef<'a>, names: Vec>) -> Import<'a> { +fn make_import<'input, 'a>( + import_tok: TokenRef<'input, 'a>, + names: Vec>, +) -> Import<'input, 'a> { Import { names, - whitespace_after_import: Default::default(), semicolon: None, import_tok, } } -fn make_import_from_as_names<'a>( - first: ImportAlias<'a>, - tail: Vec<(Comma<'a>, ImportAlias<'a>)>, -) -> Vec> { +fn make_import_from_as_names<'input, 'a>( + first: ImportAlias<'input, 'a>, + tail: Vec<(Comma<'input, 'a>, ImportAlias<'input, 'a>)>, +) -> Vec> { let mut ret = vec![]; let mut cur = first; for (comma, alias) in tail { @@ -2074,25 +1942,21 @@ fn make_import_from_as_names<'a>( ret } -fn make_lpar(tok: TokenRef) -> LeftParen { - LeftParen { - whitespace_after: Default::default(), - lpar_tok: tok, - } +fn make_lpar<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftParen<'input, 'a> { + LeftParen { lpar_tok: tok } } -fn make_rpar(tok: TokenRef) -> RightParen { - RightParen { - whitespace_before: Default::default(), - rpar_tok: tok, - } +fn make_rpar<'input, 'a>(tok: TokenRef<'input, 'a>) -> RightParen<'input, 'a> { + RightParen { rpar_tok: tok } } -fn make_module<'a>(body: Vec>, tok: TokenRef<'a>, encoding: &str) -> Module<'a> { +fn make_module<'input, 'a>( + body: Vec>, + tok: TokenRef<'input, 'a>, + encoding: &str, +) -> Module<'input, 'a> { Module { body, - header: Default::default(), - footer: Default::default(), eof_tok: tok, default_indent: " ", default_newline: "\n", @@ -2101,7 +1965,11 @@ fn make_module<'a>(body: Vec>, tok: TokenRef<'a>, encoding: &str) } } -fn make_attribute<'a>(value: Expression<'a>, dot: TokenRef<'a>, attr: Name<'a>) -> Attribute<'a> { +fn make_attribute<'input, 'a>( + value: Expression<'input, 'a>, + dot: TokenRef<'input, 'a>, + attr: Name<'input, 'a>, +) -> Attribute<'input, 'a> { let dot = make_dot(dot); Attribute { attr, @@ -2112,14 +1980,16 @@ fn make_attribute<'a>(value: Expression<'a>, dot: TokenRef<'a>, attr: Name<'a>) } } -fn make_starred_element<'a>(star_tok: TokenRef<'a>, rest: Element<'a>) -> StarredElement<'a> { +fn make_starred_element<'input, 'a>( + star_tok: TokenRef<'input, 'a>, + rest: Element<'input, 'a>, +) -> StarredElement<'input, 'a> { let value = match rest { Element::Simple { value, .. 
} => value, _ => panic!("Internal error while making starred element"), }; StarredElement { value: Box::new(value), - whitespace_before_value: Default::default(), lpar: Default::default(), rpar: Default::default(), comma: Default::default(), @@ -2127,7 +1997,9 @@ fn make_starred_element<'a>(star_tok: TokenRef<'a>, rest: Element<'a>) -> Starre } } -fn assign_target_to_element(expr: AssignTargetExpression) -> Element { +fn assign_target_to_element<'input, 'a>( + expr: AssignTargetExpression<'input, 'a>, +) -> Element<'input, 'a> { match expr { AssignTargetExpression::Attribute(a) => Element::Simple { value: Expression::Attribute(a), @@ -2153,18 +2025,13 @@ fn assign_target_to_element(expr: AssignTargetExpression) -> Element { } } -fn make_assignment<'a>( - lhs: Vec<(AssignTargetExpression<'a>, TokenRef<'a>)>, - rhs: Expression<'a>, -) -> Assign<'a> { +fn make_assignment<'input, 'a>( + lhs: Vec<(AssignTargetExpression<'input, 'a>, TokenRef<'input, 'a>)>, + rhs: Expression<'input, 'a>, +) -> Assign<'input, 'a> { let mut targets = vec![]; for (target, equal_tok) in lhs { - targets.push(AssignTarget { - target, - whitespace_before_equal: Default::default(), - whitespace_after_equal: Default::default(), - equal_tok, - }); + targets.push(AssignTarget { target, equal_tok }); } Assign { targets, @@ -2173,7 +2040,7 @@ fn make_assignment<'a>( } } -fn expr_to_element(expr: Expression) -> Element { +fn expr_to_element<'input, 'a>(expr: Expression<'input, 'a>) -> Element<'input, 'a> { match expr { Expression::StarredElement(inner_expr) => Element::Starred(inner_expr), _ => Element::Simple { @@ -2183,13 +2050,13 @@ fn expr_to_element(expr: Expression) -> Element { } } -fn make_tuple<'a>( - first: Element<'a>, - rest: Vec<(Comma<'a>, Element<'a>)>, - trailing_comma: Option>, - lpar: Option>, - rpar: Option>, -) -> Tuple<'a> { +fn make_tuple<'input, 'a>( + first: Element<'input, 'a>, + rest: Vec<(Comma<'input, 'a>, Element<'input, 'a>)>, + trailing_comma: Option>, + lpar: Option>, + rpar: Option>, +) -> Tuple<'input, 'a> { let elements = comma_separate(first, rest, trailing_comma); let lpar = lpar.map(|l| vec![l]).unwrap_or_default(); @@ -2202,7 +2069,10 @@ fn make_tuple<'a>( } } -fn make_tuple_from_elements<'a>(first: Element<'a>, mut rest: Vec>) -> Tuple<'a> { +fn make_tuple_from_elements<'input, 'a>( + first: Element<'input, 'a>, + mut rest: Vec>, +) -> Tuple<'input, 'a> { rest.insert(0, first); Tuple { elements: rest, @@ -2211,7 +2081,11 @@ fn make_tuple_from_elements<'a>(first: Element<'a>, mut rest: Vec>) } } -fn make_kwarg<'a>(name: Name<'a>, eq: TokenRef<'a>, value: Expression<'a>) -> Arg<'a> { +fn make_kwarg<'input, 'a>( + name: Name<'input, 'a>, + eq: TokenRef<'input, 'a>, + value: Expression<'input, 'a>, +) -> Arg<'input, 'a> { let equal = Some(make_assign_equal(eq)); let keyword = Some(name); Arg { @@ -2220,13 +2094,14 @@ fn make_kwarg<'a>(name: Name<'a>, eq: TokenRef<'a>, value: Expression<'a>) -> Ar equal, comma: None, star: "", - whitespace_after_star: Default::default(), - whitespace_after_arg: Default::default(), star_tok: None, } } -fn make_star_arg<'a>(star: TokenRef<'a>, expr: Expression<'a>) -> Arg<'a> { +fn make_star_arg<'input, 'a>( + star: TokenRef<'input, 'a>, + expr: Expression<'input, 'a>, +) -> Arg<'input, 'a> { let str = star.string; Arg { value: expr, @@ -2234,18 +2109,16 @@ fn make_star_arg<'a>(star: TokenRef<'a>, expr: Expression<'a>) -> Arg<'a> { equal: None, comma: None, star: str, - whitespace_after_star: Default::default(), - whitespace_after_arg: Default::default(), 
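// Illustrative sketch, not part of this patch: the `whitespace_*: Default::default()`
// fields being removed here follow one pattern across this file. The parser now
// builds "deflated" nodes that keep only their tokens, and whitespace is recovered
// from those tokens later, during inflation. With a hypothetical node pair, the
// shape of the change is:
//
//     // before: constructors pre-filled empty whitespace for a later pass to mutate
//     fn make_dot_old<'a>(tok: TokenRef<'a>) -> Dot<'a> {
//         Dot {
//             whitespace_before: Default::default(),
//             whitespace_after: Default::default(),
//             tok,
//         }
//     }
//
//     // after: the deflated node records only its token; inflating it produces the
//     // full `Dot`, deriving the whitespace fields from the text around the token
//     fn make_dot<'input, 'a>(tok: TokenRef<'input, 'a>) -> DeflatedDot<'input, 'a> {
//         DeflatedDot { tok }
//     }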
star_tok: Some(star), } } -fn make_call<'a>( - func: Expression<'a>, - lpar_tok: TokenRef<'a>, - args: Vec>, - rpar_tok: TokenRef<'a>, -) -> Call<'a> { +fn make_call<'input, 'a>( + func: Expression<'input, 'a>, + lpar_tok: TokenRef<'input, 'a>, + args: Vec>, + rpar_tok: TokenRef<'input, 'a>, +) -> Call<'input, 'a> { let lpar = vec![]; let rpar = vec![]; let func = Box::new(func); @@ -2255,14 +2128,15 @@ fn make_call<'a>( args, lpar, rpar, - whitespace_after_func: Default::default(), - whitespace_before_args: Default::default(), lpar_tok, rpar_tok, } } -fn make_genexp_call<'a>(func: Expression<'a>, mut genexp: GeneratorExp<'a>) -> Call<'a> { +fn make_genexp_call<'input, 'a>( + func: Expression<'input, 'a>, + mut genexp: GeneratorExp<'input, 'a>, +) -> Call<'input, 'a> { // func ( (genexp) ) // ^ // lpar_tok @@ -2285,53 +2159,43 @@ fn make_genexp_call<'a>(func: Expression<'a>, mut genexp: GeneratorExp<'a>) -> C equal: None, comma: None, star: "", - whitespace_after_star: Default::default(), - whitespace_after_arg: Default::default(), star_tok: None, }], lpar: vec![], rpar: vec![], - whitespace_after_func: Default::default(), - whitespace_before_args: Default::default(), lpar_tok, rpar_tok, } } -fn make_arg(expr: Expression) -> Arg { +fn make_arg<'input, 'a>(expr: Expression<'input, 'a>) -> Arg<'input, 'a> { Arg { value: expr, keyword: Default::default(), equal: Default::default(), comma: Default::default(), star: Default::default(), - whitespace_after_star: Default::default(), - whitespace_after_arg: Default::default(), star_tok: None, } } -fn make_comp_if<'a>(if_tok: TokenRef<'a>, test: Expression<'a>) -> CompIf<'a> { - CompIf { - test, - whitespace_before: Default::default(), - whitespace_before_test: Default::default(), - if_tok, - } +fn make_comp_if<'input, 'a>( + if_tok: TokenRef<'input, 'a>, + test: Expression<'input, 'a>, +) -> CompIf<'input, 'a> { + CompIf { test, if_tok } } -fn make_for_if<'a>( - async_tok: Option>, - for_tok: TokenRef<'a>, - target: AssignTargetExpression<'a>, - in_tok: TokenRef<'a>, - iter: Expression<'a>, - ifs: Vec>, -) -> CompFor<'a> { +fn make_for_if<'input, 'a>( + async_tok: Option>, + for_tok: TokenRef<'input, 'a>, + target: AssignTargetExpression<'input, 'a>, + in_tok: TokenRef<'input, 'a>, + iter: Expression<'input, 'a>, + ifs: Vec>, +) -> CompFor<'input, 'a> { let inner_for_in = None; - let asynchronous = async_tok.as_ref().map(|_| Asynchronous { - whitespace_after: Default::default(), - }); + let asynchronous = async_tok.as_ref().map(|_| make_async()); CompFor { target, @@ -2339,17 +2203,16 @@ fn make_for_if<'a>( ifs, inner_for_in, asynchronous, - whitespace_before: Default::default(), - whitespace_after_for: Default::default(), - whitespace_before_in: Default::default(), - whitespace_after_in: Default::default(), async_tok, for_tok, in_tok, } } -fn make_bare_genexp<'a>(elt: Expression<'a>, for_in: CompFor<'a>) -> GeneratorExp<'a> { +fn make_bare_genexp<'input, 'a>( + elt: Expression<'input, 'a>, + for_in: CompFor<'input, 'a>, +) -> GeneratorExp<'input, 'a> { GeneratorExp { elt: Box::new(elt), for_in: Box::new(for_in), @@ -2358,7 +2221,7 @@ fn make_bare_genexp<'a>(elt: Expression<'a>, for_in: CompFor<'a>) -> GeneratorEx } } -fn merge_comp_fors(comp_fors: Vec) -> CompFor { +fn merge_comp_fors<'input, 'a>(comp_fors: Vec>) -> CompFor<'input, 'a> { let mut it = comp_fors.into_iter().rev(); let first = it.next().expect("cant merge empty comp_fors"); @@ -2368,40 +2231,28 @@ fn merge_comp_fors(comp_fors: Vec) -> CompFor { }) } -fn make_left_bracket(tok: 
TokenRef) -> LeftSquareBracket { - LeftSquareBracket { - whitespace_after: Default::default(), - tok, - } +fn make_left_bracket<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftSquareBracket<'input, 'a> { + LeftSquareBracket { tok } } -fn make_right_bracket(tok: TokenRef) -> RightSquareBracket { - RightSquareBracket { - whitespace_before: Default::default(), - tok, - } +fn make_right_bracket<'input, 'a>(tok: TokenRef<'input, 'a>) -> RightSquareBracket<'input, 'a> { + RightSquareBracket { tok } } -fn make_left_brace(tok: TokenRef) -> LeftCurlyBrace { - LeftCurlyBrace { - whitespace_after: Default::default(), - tok, - } +fn make_left_brace<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftCurlyBrace<'input, 'a> { + LeftCurlyBrace { tok } } -fn make_right_brace(tok: TokenRef) -> RightCurlyBrace { - RightCurlyBrace { - whitespace_before: Default::default(), - tok, - } +fn make_right_brace<'input, 'a>(tok: TokenRef<'input, 'a>) -> RightCurlyBrace<'input, 'a> { + RightCurlyBrace { tok } } -fn make_list_comp<'a>( - lbracket: LeftSquareBracket<'a>, - elt: Expression<'a>, - for_in: CompFor<'a>, - rbracket: RightSquareBracket<'a>, -) -> ListComp<'a> { +fn make_list_comp<'input, 'a>( + lbracket: LeftSquareBracket<'input, 'a>, + elt: Expression<'input, 'a>, + for_in: CompFor<'input, 'a>, + rbracket: RightSquareBracket<'input, 'a>, +) -> ListComp<'input, 'a> { ListComp { elt: Box::new(elt), for_in: Box::new(for_in), @@ -2412,12 +2263,12 @@ fn make_list_comp<'a>( } } -fn make_set_comp<'a>( - lbrace: LeftCurlyBrace<'a>, - elt: Expression<'a>, - for_in: CompFor<'a>, - rbrace: RightCurlyBrace<'a>, -) -> SetComp<'a> { +fn make_set_comp<'input, 'a>( + lbrace: LeftCurlyBrace<'input, 'a>, + elt: Expression<'input, 'a>, + for_in: CompFor<'input, 'a>, + rbrace: RightCurlyBrace<'input, 'a>, +) -> SetComp<'input, 'a> { SetComp { elt: Box::new(elt), for_in: Box::new(for_in), @@ -2428,12 +2279,16 @@ fn make_set_comp<'a>( } } -fn make_dict_comp<'a>( - lbrace: LeftCurlyBrace<'a>, - kvpair: (Expression<'a>, TokenRef<'a>, Expression<'a>), - for_in: CompFor<'a>, - rbrace: RightCurlyBrace<'a>, -) -> DictComp<'a> { +fn make_dict_comp<'input, 'a>( + lbrace: LeftCurlyBrace<'input, 'a>, + kvpair: ( + Expression<'input, 'a>, + TokenRef<'input, 'a>, + Expression<'input, 'a>, + ), + for_in: CompFor<'input, 'a>, + rbrace: RightCurlyBrace<'input, 'a>, +) -> DictComp<'input, 'a> { let (key, colon_tok, value) = kvpair; DictComp { @@ -2444,17 +2299,15 @@ fn make_dict_comp<'a>( rbrace, lpar: vec![], rpar: vec![], - whitespace_before_colon: Default::default(), - whitespace_after_colon: Default::default(), colon_tok, } } -fn make_list<'a>( - lbracket: LeftSquareBracket<'a>, - elements: Vec>, - rbracket: RightSquareBracket<'a>, -) -> List<'a> { +fn make_list<'input, 'a>( + lbracket: LeftSquareBracket<'input, 'a>, + elements: Vec>, + rbracket: RightSquareBracket<'input, 'a>, +) -> List<'input, 'a> { List { elements, lbracket, @@ -2464,11 +2317,11 @@ fn make_list<'a>( } } -fn make_set<'a>( - lbrace: LeftCurlyBrace<'a>, - elements: Vec>, - rbrace: RightCurlyBrace<'a>, -) -> Set<'a> { +fn make_set<'input, 'a>( + lbrace: LeftCurlyBrace<'input, 'a>, + elements: Vec>, + rbrace: RightCurlyBrace<'input, 'a>, +) -> Set<'input, 'a> { Set { elements, lbrace, @@ -2478,13 +2331,13 @@ fn make_set<'a>( } } -fn comma_separate<'a, T>( +fn comma_separate<'input, 'a, T>( first: T, - rest: Vec<(Comma<'a>, T)>, - trailing_comma: Option>, + rest: Vec<(Comma<'input, 'a>, T)>, + trailing_comma: Option>, ) -> Vec where - T: WithComma<'a>, + T: WithComma<'input, 'a>, { 
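// Illustrative sketch, not part of this patch: `comma_separate` glues a parsed
// `first (comma item)* [comma]` sequence into a single Vec, attaching each comma to
// the element it follows via the `WithComma` trait. A hypothetical call site:
//
//     let elements = comma_separate(first_element, comma_item_pairs, trailing_comma);
//     // elements[0] carries the comma that separated it from elements[1], and so
//     // on; only the final element may carry the optional trailing comma.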
let mut elements = vec![]; let mut current = first; @@ -2499,11 +2352,11 @@ where elements } -fn make_dict<'a>( - lbrace: LeftCurlyBrace<'a>, - elements: Vec>, - rbrace: RightCurlyBrace<'a>, -) -> Dict<'a> { +fn make_dict<'input, 'a>( + lbrace: LeftCurlyBrace<'input, 'a>, + elements: Vec>, + rbrace: RightCurlyBrace<'input, 'a>, +) -> Dict<'input, 'a> { Dict { elements, lbrace, @@ -2513,70 +2366,65 @@ fn make_dict<'a>( } } -fn make_double_starred_keypairs<'a>( - first: DictElement<'a>, - rest: Vec<(Comma<'a>, DictElement<'a>)>, - trailing_comma: Option>, -) -> Vec> { +fn make_double_starred_keypairs<'input, 'a>( + first: DictElement<'input, 'a>, + rest: Vec<(Comma<'input, 'a>, DictElement<'input, 'a>)>, + trailing_comma: Option>, +) -> Vec> { let mut elements = vec![]; let mut current = first; for (comma, next) in rest { elements.push(current.with_comma(comma)); current = next; } - if let Some(mut comma) = trailing_comma { - // don't consume trailing whitespace for trailing comma - comma.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace(SimpleWhitespace("")); + if let Some(comma) = trailing_comma { current = current.with_comma(comma); } elements.push(current); elements } -fn make_dict_element<'a>(el: (Expression<'a>, TokenRef<'a>, Expression<'a>)) -> DictElement<'a> { +fn make_dict_element<'input, 'a>( + el: ( + Expression<'input, 'a>, + TokenRef<'input, 'a>, + Expression<'input, 'a>, + ), +) -> DictElement<'input, 'a> { let (key, colon_tok, value) = el; DictElement::Simple { key, value, comma: Default::default(), - whitespace_before_colon: Default::default(), - whitespace_after_colon: Default::default(), colon_tok, } } -fn make_double_starred_element<'a>( - star_tok: TokenRef<'a>, - value: Expression<'a>, -) -> StarredDictElement<'a> { +fn make_double_starred_element<'input, 'a>( + star_tok: TokenRef<'input, 'a>, + value: Expression<'input, 'a>, +) -> StarredDictElement<'input, 'a> { StarredDictElement { value, comma: Default::default(), - whitespace_before_value: Default::default(), star_tok, } } -fn make_index(value: Expression) -> BaseSlice { +fn make_index<'input, 'a>(value: Expression<'input, 'a>) -> BaseSlice<'input, 'a> { BaseSlice::Index(Box::new(Index { value })) } -fn make_colon(tok: TokenRef) -> Colon { - let whitespace_before = Default::default(); - let whitespace_after = Default::default(); - Colon { - whitespace_before, - whitespace_after, - tok, - } +fn make_colon<'input, 'a>(tok: TokenRef<'input, 'a>) -> Colon<'input, 'a> { + Colon { tok } } -fn make_slice<'a>( - lower: Option>, - first_colon: TokenRef<'a>, - upper: Option>, - rest: Option<(TokenRef<'a>, Option>)>, -) -> BaseSlice<'a> { +fn make_slice<'input, 'a>( + lower: Option>, + first_colon: TokenRef<'input, 'a>, + upper: Option>, + rest: Option<(TokenRef<'input, 'a>, Option>)>, +) -> BaseSlice<'input, 'a> { let first_colon = make_colon(first_colon); let (second_colon, step) = if let Some((tok, step)) = rest { (Some(make_colon(tok)), step) @@ -2592,11 +2440,11 @@ fn make_slice<'a>( })) } -fn make_slices<'a>( - first: BaseSlice<'a>, - rest: Vec<(Comma<'a>, BaseSlice<'a>)>, - trailing_comma: Option>, -) -> Vec> { +fn make_slices<'input, 'a>( + first: BaseSlice<'input, 'a>, + rest: Vec<(Comma<'input, 'a>, BaseSlice<'input, 'a>)>, + trailing_comma: Option>, +) -> Vec> { let mut elements = vec![]; let mut current = first; for (comma, next) in rest { @@ -2613,13 +2461,12 @@ fn make_slices<'a>( elements } -fn make_subscript<'a>( - value: Expression<'a>, - lbracket: LeftSquareBracket<'a>, - slice: Vec>, - 
rbracket: RightSquareBracket<'a>, -) -> Subscript<'a> { - let lbracket_tok = lbracket.tok.clone(); +fn make_subscript<'input, 'a>( + value: Expression<'input, 'a>, + lbracket: LeftSquareBracket<'input, 'a>, + slice: Vec>, + rbracket: RightSquareBracket<'input, 'a>, +) -> Subscript<'input, 'a> { Subscript { value: Box::new(value), slice, @@ -2627,37 +2474,31 @@ fn make_subscript<'a>( rbracket, lpar: Default::default(), rpar: Default::default(), - whitespace_after_value: Default::default(), - lbracket_tok, } } -fn make_ifexp<'a>( - body: Expression<'a>, - if_tok: TokenRef<'a>, - test: Expression<'a>, - else_tok: TokenRef<'a>, - orelse: Expression<'a>, -) -> IfExp<'a> { +fn make_ifexp<'input, 'a>( + body: Expression<'input, 'a>, + if_tok: TokenRef<'input, 'a>, + test: Expression<'input, 'a>, + else_tok: TokenRef<'input, 'a>, + orelse: Expression<'input, 'a>, +) -> IfExp<'input, 'a> { IfExp { test: Box::new(test), body: Box::new(body), orelse: Box::new(orelse), lpar: Default::default(), rpar: Default::default(), - whitespace_before_if: Default::default(), - whitespace_after_if: Default::default(), - whitespace_before_else: Default::default(), - whitespace_after_else: Default::default(), if_tok, else_tok, } } -fn add_arguments_trailing_comma<'a>( - mut args: Vec>, - trailing_comma: Option>, -) -> Vec> { +fn add_arguments_trailing_comma<'input, 'a>( + mut args: Vec>, + trailing_comma: Option>, +) -> Vec> { if let Some(comma) = trailing_comma { let last = args.pop().unwrap(); args.push(last.with_comma(comma)); @@ -2665,12 +2506,12 @@ fn add_arguments_trailing_comma<'a>( args } -fn make_lambda<'a>( - lambda_tok: TokenRef<'a>, - params: Parameters<'a>, - colon_tok: TokenRef<'a>, - expr: Expression<'a>, -) -> Lambda<'a> { +fn make_lambda<'input, 'a>( + lambda_tok: TokenRef<'input, 'a>, + params: Parameters<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + expr: Expression<'input, 'a>, +) -> Lambda<'input, 'a> { let colon = make_colon(colon_tok); Lambda { params: Box::new(params), @@ -2678,26 +2519,26 @@ fn make_lambda<'a>( colon, lpar: Default::default(), rpar: Default::default(), - whitespace_after_lambda: Default::default(), lambda_tok, } } -fn make_annotation<'a>(tok: TokenRef<'a>, ann: Expression<'a>) -> Annotation<'a> { +fn make_annotation<'input, 'a>( + tok: TokenRef<'input, 'a>, + ann: Expression<'input, 'a>, +) -> Annotation<'input, 'a> { Annotation { annotation: ann, - whitespace_before_indicator: Default::default(), - whitespace_after_indicator: Default::default(), tok, } } -fn make_ann_assignment<'a>( - target: AssignTargetExpression<'a>, - col: TokenRef<'a>, - ann: Expression<'a>, - rhs: Option<(TokenRef<'a>, Expression<'a>)>, -) -> AnnAssign<'a> { +fn make_ann_assignment<'input, 'a>( + target: AssignTargetExpression<'input, 'a>, + col: TokenRef<'input, 'a>, + ann: Expression<'input, 'a>, + rhs: Option<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, +) -> AnnAssign<'input, 'a> { let annotation = make_annotation(col, ann); let (eq, value) = rhs.map(|(x, y)| (Some(x), Some(y))).unwrap_or((None, None)); let equal = eq.map(make_assign_equal); @@ -2710,11 +2551,11 @@ fn make_ann_assignment<'a>( } } -fn make_yield<'a>( - yield_tok: TokenRef<'a>, - f: Option>, - e: Option>, -) -> Yield<'a> { +fn make_yield<'input, 'a>( + yield_tok: TokenRef<'input, 'a>, + f: Option>, + e: Option>, +) -> Yield<'input, 'a> { let value = match (f, e) { (None, None) => None, (Some(f), Some(e)) => Some(YieldValue::From(Box::new(make_from(f, e)))), @@ -2725,34 +2566,30 @@ fn make_yield<'a>( value: 
value.map(Box::new), lpar: Default::default(), rpar: Default::default(), - whitespace_after_yield: Default::default(), yield_tok, } } -fn make_from<'a>(tok: TokenRef<'a>, e: Expression<'a>) -> From<'a> { - From { - item: e, - whitespace_before_from: Default::default(), - whitespace_after_from: Default::default(), - tok, - } +fn make_from<'input, 'a>(tok: TokenRef<'input, 'a>, e: Expression<'input, 'a>) -> From<'input, 'a> { + From { item: e, tok } } -fn make_return<'a>(return_tok: TokenRef<'a>, value: Option>) -> Return<'a> { +fn make_return<'input, 'a>( + return_tok: TokenRef<'input, 'a>, + value: Option>, +) -> Return<'input, 'a> { Return { value, - whitespace_after_return: Default::default(), semicolon: Default::default(), return_tok, } } -fn make_assert<'a>( - assert_tok: TokenRef<'a>, - test: Expression<'a>, - rest: Option<(Comma<'a>, Expression<'a>)>, -) -> Assert<'a> { +fn make_assert<'input, 'a>( + assert_tok: TokenRef<'input, 'a>, + test: Expression<'input, 'a>, + rest: Option<(Comma<'input, 'a>, Expression<'input, 'a>)>, +) -> Assert<'input, 'a> { let (comma, msg) = if let Some((c, msg)) = rest { (Some(c), Some(msg)) } else { @@ -2763,34 +2600,32 @@ fn make_assert<'a>( test, msg, comma, - whitespace_after_assert: Default::default(), semicolon: Default::default(), assert_tok, } } -fn make_raise<'a>( - raise_tok: TokenRef<'a>, - exc: Option>, - rest: Option<(TokenRef<'a>, Expression<'a>)>, -) -> Raise<'a> { +fn make_raise<'input, 'a>( + raise_tok: TokenRef<'input, 'a>, + exc: Option>, + rest: Option<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, +) -> Raise<'input, 'a> { let cause = rest.map(|(t, e)| make_from(t, e)); Raise { exc, cause, - whitespace_after_raise: Default::default(), semicolon: Default::default(), raise_tok, } } -fn make_global<'a>( - tok: TokenRef<'a>, - init: Vec<(Name<'a>, Comma<'a>)>, - last: Name<'a>, -) -> Global<'a> { - let mut names: Vec> = init +fn make_global<'input, 'a>( + tok: TokenRef<'input, 'a>, + init: Vec<(Name<'input, 'a>, Comma<'input, 'a>)>, + last: Name<'input, 'a>, +) -> Global<'input, 'a> { + let mut names: Vec> = init .into_iter() .map(|(name, c)| NameItem { name, @@ -2803,18 +2638,17 @@ fn make_global<'a>( }); Global { names, - whitespace_after_global: Default::default(), semicolon: Default::default(), tok, } } -fn make_nonlocal<'a>( - tok: TokenRef<'a>, - init: Vec<(Name<'a>, Comma<'a>)>, - last: Name<'a>, -) -> Nonlocal<'a> { - let mut names: Vec> = init +fn make_nonlocal<'input, 'a>( + tok: TokenRef<'input, 'a>, + init: Vec<(Name<'input, 'a>, Comma<'input, 'a>)>, + last: Name<'input, 'a>, +) -> Nonlocal<'input, 'a> { + let mut names: Vec> = init .into_iter() .map(|(name, c)| NameItem { name, @@ -2827,26 +2661,23 @@ fn make_nonlocal<'a>( }); Nonlocal { names, - whitespace_after_nonlocal: Default::default(), semicolon: Default::default(), tok, } } #[allow(clippy::too_many_arguments)] -fn make_for<'a>( - async_tok: Option>, - for_tok: TokenRef<'a>, - target: AssignTargetExpression<'a>, - in_tok: TokenRef<'a>, - iter: Expression<'a>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, - orelse: Option>, -) -> For<'a> { - let asynchronous = async_tok.as_ref().map(|_| Asynchronous { - whitespace_after: Default::default(), - }); +fn make_for<'input, 'a>( + async_tok: Option>, + for_tok: TokenRef<'input, 'a>, + target: AssignTargetExpression<'input, 'a>, + in_tok: TokenRef<'input, 'a>, + iter: Expression<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, + orelse: Option>, +) -> For<'input, 'a> { + let asynchronous = 
async_tok.as_ref().map(|_| make_async()); For { target, @@ -2854,11 +2685,6 @@ fn make_for<'a>( body, orelse, asynchronous, - leading_lines: Default::default(), - whitespace_after_for: Default::default(), - whitespace_before_in: Default::default(), - whitespace_after_in: Default::default(), - whitespace_before_colon: Default::default(), async_tok, for_tok, in_tok, @@ -2866,50 +2692,55 @@ fn make_for<'a>( } } -fn make_while<'a>( - while_tok: TokenRef<'a>, - test: Expression<'a>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, - orelse: Option>, -) -> While<'a> { +fn make_while<'input, 'a>( + while_tok: TokenRef<'input, 'a>, + test: Expression<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, + orelse: Option>, +) -> While<'input, 'a> { While { test, body, orelse, - leading_lines: Default::default(), - whitespace_after_while: Default::default(), - whitespace_before_colon: Default::default(), while_tok, colon_tok, } } -fn make_await<'a>(await_tok: TokenRef<'a>, expression: Expression<'a>) -> Await<'a> { +fn make_await<'input, 'a>( + await_tok: TokenRef<'input, 'a>, + expression: Expression<'input, 'a>, +) -> Await<'input, 'a> { Await { expression: Box::new(expression), lpar: Default::default(), rpar: Default::default(), - whitespace_after_await: Default::default(), await_tok, } } -fn make_class_def<'a>( - class_tok: TokenRef<'a>, - name: Name<'a>, - args: Option<(LeftParen<'a>, Option>>, RightParen<'a>)>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> std::result::Result, &'static str> { +fn make_class_def<'input, 'a>( + class_tok: TokenRef<'input, 'a>, + name: Name<'input, 'a>, + args: Option<( + LeftParen<'input, 'a>, + Option>>, + RightParen<'input, 'a>, + )>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> std::result::Result, &'static str> { let mut bases = vec![]; let mut keywords = vec![]; - let mut parens_tok = None; + let mut lpar_tok = None; + let mut rpar_tok = None; let mut lpar = None; let mut rpar = None; if let Some((lpar_, args, rpar_)) = args { - parens_tok = Some((lpar_.lpar_tok.clone(), rpar_.rpar_tok.clone())); + lpar_tok = Some(lpar_.lpar_tok); + rpar_tok = Some(rpar_.rpar_tok); lpar = Some(lpar_); rpar = Some(rpar_); if let Some(args) = args { @@ -2938,32 +2769,29 @@ fn make_class_def<'a>( decorators: vec![], lpar, rpar, - leading_lines: Default::default(), - lines_after_decorators: Default::default(), - whitespace_after_class: Default::default(), - whitespace_after_name: Default::default(), - whitespace_before_colon: Default::default(), class_tok, - parens_tok, + lpar_tok, + rpar_tok, colon_tok, }) } -fn make_string(tok: TokenRef) -> String { +fn make_string<'input, 'a>(tok: TokenRef<'input, 'a>) -> String<'input, 'a> { String::Simple(SimpleString { value: tok.string, ..Default::default() }) } -fn make_strings<'a>(s: Vec<(String<'a>, TokenRef<'a>)>) -> String<'a> { +fn make_strings<'input, 'a>( + s: Vec<(String<'input, 'a>, TokenRef<'input, 'a>)>, +) -> String<'input, 'a> { let mut strings = s.into_iter().rev(); let (first, _) = strings.next().expect("no strings to make a string of"); strings.fold(first, |acc, (str, tok)| { - let ret: String<'a> = String::Concatenated(ConcatenatedString { + let ret: String<'input, 'a> = String::Concatenated(ConcatenatedString { left: Box::new(str), right: Box::new(acc), - whitespace_between: Default::default(), lpar: Default::default(), rpar: Default::default(), right_tok: tok, @@ -2972,14 +2800,17 @@ fn make_strings<'a>(s: Vec<(String<'a>, TokenRef<'a>)>) -> String<'a> { }) } -fn 
make_fstring_expression<'a>( - lbrace_tok: TokenRef<'a>, - expression: Expression<'a>, - eq: Option>, - conversion_pair: Option<(TokenRef<'a>, &'a str)>, - format_pair: Option<(TokenRef<'a>, Vec>)>, - rbrace_tok: TokenRef<'a>, -) -> FormattedStringExpression<'a> { +fn make_fstring_expression<'input, 'a>( + lbrace_tok: TokenRef<'input, 'a>, + expression: Expression<'input, 'a>, + eq: Option>, + conversion_pair: Option<(TokenRef<'input, 'a>, &'a str)>, + format_pair: Option<( + TokenRef<'input, 'a>, + Vec>, + )>, + rbrace_tok: TokenRef<'input, 'a>, +) -> FormattedStringExpression<'input, 'a> { let equal = eq.map(make_assign_equal); let (conversion_tok, conversion) = if let Some((t, c)) = conversion_pair { (Some(t), Some(c)) @@ -3005,19 +2836,17 @@ fn make_fstring_expression<'a>( expression, conversion, format_spec, - whitespace_before_expression: Default::default(), - whitespace_after_expression: Default::default(), equal, lbrace_tok, after_expr_tok, } } -fn make_fstring<'a>( +fn make_fstring<'input, 'a>( start: &'a str, - parts: Vec>, + parts: Vec>, end: &'a str, -) -> FormattedString<'a> { +) -> FormattedString<'input, 'a> { FormattedString { start, parts, @@ -3027,180 +2856,112 @@ fn make_fstring<'a>( } } -fn make_finally<'a>( - finally_tok: TokenRef<'a>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> Finally<'a> { +fn make_finally<'input, 'a>( + finally_tok: TokenRef<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> Finally<'input, 'a> { Finally { body, - leading_lines: Default::default(), - whitespace_before_colon: Default::default(), finally_tok, colon_tok, } } -fn make_except<'a>( - except_tok: TokenRef<'a>, - exp: Option>, - as_: Option<(TokenRef<'a>, Name<'a>)>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> ExceptHandler<'a> { +fn make_except<'input, 'a>( + except_tok: TokenRef<'input, 'a>, + exp: Option>, + as_: Option<(TokenRef<'input, 'a>, Name<'input, 'a>)>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> ExceptHandler<'input, 'a> { // TODO: AsName should come from outside let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))); ExceptHandler { body, r#type: exp, name, - leading_lines: Default::default(), - whitespace_after_except: Default::default(), - whitespace_before_colon: Default::default(), except_tok, colon_tok, } } -fn make_except_star<'a>( - except_tok: TokenRef<'a>, - star_tok: TokenRef<'a>, - exp: Expression<'a>, - as_: Option<(TokenRef<'a>, Name<'a>)>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> ExceptStarHandler<'a> { +fn make_except_star<'input, 'a>( + except_tok: TokenRef<'input, 'a>, + star_tok: TokenRef<'input, 'a>, + exp: Expression<'input, 'a>, + as_: Option<(TokenRef<'input, 'a>, Name<'input, 'a>)>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> ExceptStarHandler<'input, 'a> { // TODO: AsName should come from outside let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))); ExceptStarHandler { body, r#type: exp, name, - leading_lines: Default::default(), - whitespace_after_except: Default::default(), - whitespace_after_star: Default::default(), - whitespace_before_colon: Default::default(), except_tok, colon_tok, star_tok, } } -fn make_try<'a>( - try_tok: TokenRef<'a>, - body: Suite<'a>, - handlers: Vec>, - orelse: Option>, - finalbody: Option>, -) -> Try<'a> { +fn make_try<'input, 'a>( + try_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, + handlers: Vec>, + orelse: Option>, + finalbody: Option>, +) 
-> Try<'input, 'a> { Try { body, handlers, orelse, finalbody, - leading_lines: Default::default(), - whitespace_before_colon: Default::default(), try_tok, } } -fn make_try_star<'a>( - try_tok: TokenRef<'a>, - body: Suite<'a>, - handlers: Vec>, - orelse: Option>, - finalbody: Option>, -) -> TryStar<'a> { +fn make_try_star<'input, 'a>( + try_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, + handlers: Vec>, + orelse: Option>, + finalbody: Option>, +) -> TryStar<'input, 'a> { TryStar { body, handlers, orelse, finalbody, - leading_lines: Default::default(), - whitespace_before_colon: Default::default(), try_tok, } } -fn make_aug_op(tok: TokenRef) -> Result { - let whitespace_before = Default::default(); - let whitespace_after = Default::default(); - +fn make_aug_op<'input, 'a>(tok: TokenRef<'input, 'a>) -> Result<'a, AugOp<'input, 'a>> { Ok(match tok.string { - "+=" => AugOp::AddAssign { - whitespace_before, - whitespace_after, - tok, - }, - "-=" => AugOp::SubtractAssign { - whitespace_before, - whitespace_after, - tok, - }, - "*=" => AugOp::MultiplyAssign { - whitespace_before, - whitespace_after, - tok, - }, - "@=" => AugOp::MatrixMultiplyAssign { - whitespace_before, - whitespace_after, - tok, - }, - "/=" => AugOp::DivideAssign { - whitespace_before, - whitespace_after, - tok, - }, - "%=" => AugOp::ModuloAssign { - whitespace_before, - whitespace_after, - tok, - }, - "&=" => AugOp::BitAndAssign { - whitespace_before, - whitespace_after, - tok, - }, - "|=" => AugOp::BitOrAssign { - whitespace_before, - whitespace_after, - tok, - }, - "^=" => AugOp::BitXorAssign { - whitespace_before, - whitespace_after, - tok, - }, - "<<=" => AugOp::LeftShiftAssign { - whitespace_before, - whitespace_after, - tok, - }, - ">>=" => AugOp::RightShiftAssign { - whitespace_before, - whitespace_after, - tok, - }, - "**=" => AugOp::PowerAssign { - whitespace_before, - whitespace_after, - tok, - }, - "//=" => AugOp::FloorDivideAssign { - whitespace_before, - whitespace_after, - tok, - }, + "+=" => AugOp::AddAssign { tok }, + "-=" => AugOp::SubtractAssign { tok }, + "*=" => AugOp::MultiplyAssign { tok }, + "@=" => AugOp::MatrixMultiplyAssign { tok }, + "/=" => AugOp::DivideAssign { tok }, + "%=" => AugOp::ModuloAssign { tok }, + "&=" => AugOp::BitAndAssign { tok }, + "|=" => AugOp::BitOrAssign { tok }, + "^=" => AugOp::BitXorAssign { tok }, + "<<=" => AugOp::LeftShiftAssign { tok }, + ">>=" => AugOp::RightShiftAssign { tok }, + "**=" => AugOp::PowerAssign { tok }, + "//=" => AugOp::FloorDivideAssign { tok }, _ => return Err(ParserError::OperatorError), }) } -fn make_aug_assign<'a>( - target: AssignTargetExpression<'a>, - operator: AugOp<'a>, - value: Expression<'a>, -) -> AugAssign<'a> { +fn make_aug_assign<'input, 'a>( + target: AssignTargetExpression<'input, 'a>, + operator: AugOp<'input, 'a>, + value: Expression<'input, 'a>, +) -> AugAssign<'input, 'a> { AugAssign { target, operator, @@ -3209,11 +2970,11 @@ fn make_aug_assign<'a>( } } -fn make_with_item<'a>( - item: Expression<'a>, - as_: Option>, - n: Option>, -) -> WithItem<'a> { +fn make_with_item<'input, 'a>( + item: Expression<'input, 'a>, + as_: Option>, + n: Option>, +) -> WithItem<'input, 'a> { let asname = match (as_, n) { (Some(as_), Some(n)) => Some(make_as_name(as_, n)), (None, None) => None, @@ -3226,47 +2987,44 @@ fn make_with_item<'a>( } } -fn make_with<'a>( - async_tok: Option>, - with_tok: TokenRef<'a>, - lpar: Option>, - items: Vec>, - rpar: Option>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> With<'a> { - let asynchronous = 
async_tok.as_ref().map(|_| Asynchronous { - whitespace_after: Default::default(), - }); +fn make_with<'input, 'a>( + async_tok: Option>, + with_tok: TokenRef<'input, 'a>, + lpar: Option>, + items: Vec>, + rpar: Option>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> With<'input, 'a> { + let asynchronous = async_tok.as_ref().map(|_| make_async()); With { items, body, asynchronous, - leading_lines: Default::default(), lpar, rpar, - whitespace_after_with: Default::default(), - whitespace_before_colon: Default::default(), async_tok, with_tok, colon_tok, } } -fn make_del<'a>(tok: TokenRef<'a>, target: DelTargetExpression<'a>) -> Del<'a> { +fn make_del<'input, 'a>( + tok: TokenRef<'input, 'a>, + target: DelTargetExpression<'input, 'a>, +) -> Del<'input, 'a> { Del { target, - whitespace_after_del: Default::default(), semicolon: Default::default(), tok, } } -fn make_del_tuple<'a>( - lpar: Option>, - elements: Vec>, - rpar: Option>, -) -> DelTargetExpression<'a> { +fn make_del_tuple<'input, 'a>( + lpar: Option>, + elements: Vec>, + rpar: Option>, +) -> DelTargetExpression<'input, 'a> { DelTargetExpression::Tuple(Box::new(Tuple { elements, lpar: lpar.map(|x| vec![x]).unwrap_or_default(), @@ -3274,35 +3032,32 @@ fn make_del_tuple<'a>( })) } -fn make_named_expr<'a>(name: Name<'a>, tok: TokenRef<'a>, expr: Expression<'a>) -> NamedExpr<'a> { +fn make_named_expr<'input, 'a>( + name: Name<'input, 'a>, + tok: TokenRef<'input, 'a>, + expr: Expression<'input, 'a>, +) -> NamedExpr<'input, 'a> { NamedExpr { target: Box::new(Expression::Name(Box::new(name))), value: Box::new(expr), lpar: Default::default(), rpar: Default::default(), - whitespace_before_walrus: Default::default(), - whitespace_after_walrus: Default::default(), walrus_tok: tok, } } -fn make_match<'a>( - match_tok: TokenRef<'a>, - subject: Expression<'a>, - colon_tok: TokenRef<'a>, - indent_tok: TokenRef<'a>, - cases: Vec>, - dedent_tok: TokenRef<'a>, -) -> Match<'a> { +fn make_match<'input, 'a>( + match_tok: TokenRef<'input, 'a>, + subject: Expression<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + indent_tok: TokenRef<'input, 'a>, + cases: Vec>, + dedent_tok: TokenRef<'input, 'a>, +) -> Match<'input, 'a> { Match { subject, cases, - leading_lines: Default::default(), - whitespace_after_match: Default::default(), - whitespace_before_colon: Default::default(), - whitespace_after_colon: Default::default(), indent: Default::default(), - footer: Default::default(), match_tok, colon_tok, indent_tok, @@ -3310,13 +3065,13 @@ fn make_match<'a>( } } -fn make_case<'a>( - case_tok: TokenRef<'a>, - pattern: MatchPattern<'a>, - guard: Option<(TokenRef<'a>, Expression<'a>)>, - colon_tok: TokenRef<'a>, - body: Suite<'a>, -) -> MatchCase<'a> { +fn make_case<'input, 'a>( + case_tok: TokenRef<'input, 'a>, + pattern: MatchPattern<'input, 'a>, + guard: Option<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, + colon_tok: TokenRef<'input, 'a>, + body: Suite<'input, 'a>, +) -> MatchCase<'input, 'a> { let (if_tok, guard) = match guard { Some((if_tok, guard)) => (Some(if_tok), Some(guard)), None => (None, None), @@ -3325,30 +3080,25 @@ fn make_case<'a>( pattern, guard, body, - leading_lines: Default::default(), - whitespace_after_case: Default::default(), - whitespace_before_if: Default::default(), - whitespace_after_if: Default::default(), - whitespace_before_colon: Default::default(), case_tok, if_tok, colon_tok, } } -fn make_match_value(value: Expression) -> MatchPattern { +fn make_match_value<'input, 'a>(value: Expression<'input, 'a>) -> 
MatchPattern<'input, 'a> { MatchPattern::Value(MatchValue { value }) } -fn make_match_singleton(value: Name) -> MatchPattern { +fn make_match_singleton<'input, 'a>(value: Name<'input, 'a>) -> MatchPattern<'input, 'a> { MatchPattern::Singleton(MatchSingleton { value }) } -fn make_list_pattern<'a>( - lbracket: Option>, - patterns: Vec>, - rbracket: Option>, -) -> MatchSequence<'a> { +fn make_list_pattern<'input, 'a>( + lbracket: Option>, + patterns: Vec>, + rbracket: Option>, +) -> MatchSequence<'input, 'a> { MatchSequence::MatchList(MatchList { patterns, lbracket, @@ -3358,34 +3108,28 @@ fn make_list_pattern<'a>( }) } -fn make_as_pattern<'a>( - pattern: Option>, - as_tok: Option>, - name: Option>, -) -> MatchPattern<'a> { +fn make_as_pattern<'input, 'a>( + pattern: Option>, + as_tok: Option>, + name: Option>, +) -> MatchPattern<'input, 'a> { MatchPattern::As(Box::new(MatchAs { pattern, name, lpar: Default::default(), rpar: Default::default(), - whitespace_before_as: Default::default(), - whitespace_after_as: Default::default(), as_tok, })) } -fn make_bit_or(tok: TokenRef) -> BitOr { - BitOr { - whitespace_before: Default::default(), - whitespace_after: Default::default(), - tok, - } +fn make_bit_or<'input, 'a>(tok: TokenRef<'input, 'a>) -> BitOr<'input, 'a> { + BitOr { tok } } -fn make_or_pattern<'a>( - first: MatchPattern<'a>, - rest: Vec<(TokenRef<'a>, MatchPattern<'a>)>, -) -> MatchPattern<'a> { +fn make_or_pattern<'input, 'a>( + first: MatchPattern<'input, 'a>, + rest: Vec<(TokenRef<'input, 'a>, MatchPattern<'input, 'a>)>, +) -> MatchPattern<'input, 'a> { if rest.is_empty() { return first; } @@ -3411,25 +3155,29 @@ fn make_or_pattern<'a>( })) } -fn ensure_real_number(tok: TokenRef) -> GrammarResult { +fn ensure_real_number<'input, 'a>( + tok: TokenRef<'input, 'a>, +) -> GrammarResult> { match make_number(tok) { e @ (Expression::Integer(_) | Expression::Float(_)) => Ok(e), _ => Err("real number"), } } -fn ensure_imaginary_number(tok: TokenRef) -> GrammarResult { +fn ensure_imaginary_number<'input, 'a>( + tok: TokenRef<'input, 'a>, +) -> GrammarResult> { match make_number(tok) { e @ Expression::Imaginary(_) => Ok(e), _ => Err("imaginary number"), } } -fn make_tuple_pattern<'a>( - lpar: LeftParen<'a>, - patterns: Vec>, - rpar: RightParen<'a>, -) -> MatchSequence<'a> { +fn make_tuple_pattern<'input, 'a>( + lpar: LeftParen<'input, 'a>, + patterns: Vec>, + rpar: RightParen<'input, 'a>, +) -> MatchSequence<'input, 'a> { MatchSequence::MatchTuple(MatchTuple { patterns, lpar: vec![lpar], @@ -3437,40 +3185,44 @@ fn make_tuple_pattern<'a>( }) } -fn make_open_sequence_pattern<'a>( - first: StarrableMatchSequenceElement<'a>, - comma: Comma<'a>, - mut rest: Vec>, -) -> Vec> { +fn make_open_sequence_pattern<'input, 'a>( + first: StarrableMatchSequenceElement<'input, 'a>, + comma: Comma<'input, 'a>, + mut rest: Vec>, +) -> Vec> { rest.insert(0, first.with_comma(comma)); rest } -fn make_match_sequence_element(value: MatchPattern) -> MatchSequenceElement { +fn make_match_sequence_element<'input, 'a>( + value: MatchPattern<'input, 'a>, +) -> MatchSequenceElement<'input, 'a> { MatchSequenceElement { value, comma: Default::default(), } } -fn make_match_star<'a>(star_tok: TokenRef<'a>, name: Option>) -> MatchStar<'a> { +fn make_match_star<'input, 'a>( + star_tok: TokenRef<'input, 'a>, + name: Option>, +) -> MatchStar<'input, 'a> { MatchStar { name, comma: Default::default(), - whitespace_before_name: Default::default(), star_tok, } } -fn make_match_mapping<'a>( - lbrace: LeftCurlyBrace<'a>, - mut elements: 
Vec>, - el_comma: Option>, - star_tok: Option>, - rest: Option>, - trailing_comma: Option>, - rbrace: RightCurlyBrace<'a>, -) -> MatchPattern<'a> { +fn make_match_mapping<'input, 'a>( + lbrace: LeftCurlyBrace<'input, 'a>, + mut elements: Vec>, + el_comma: Option>, + star_tok: Option>, + rest: Option>, + trailing_comma: Option>, + rbrace: RightCurlyBrace<'input, 'a>, +) -> MatchPattern<'input, 'a> { if let Some(c) = el_comma { if let Some(el) = elements.pop() { elements.push(el.with_comma(c)); @@ -3485,35 +3237,32 @@ fn make_match_mapping<'a>( rbrace, lpar: Default::default(), rpar: Default::default(), - whitespace_before_rest: Default::default(), star_tok, }) } -fn make_match_mapping_element<'a>( - key: Expression<'a>, - colon_tok: TokenRef<'a>, - pattern: MatchPattern<'a>, -) -> MatchMappingElement<'a> { +fn make_match_mapping_element<'input, 'a>( + key: Expression<'input, 'a>, + colon_tok: TokenRef<'input, 'a>, + pattern: MatchPattern<'input, 'a>, +) -> MatchMappingElement<'input, 'a> { MatchMappingElement { key, pattern, comma: Default::default(), - whitespace_before_colon: Default::default(), - whitespace_after_colon: Default::default(), colon_tok, } } -fn make_class_pattern<'a>( - cls: NameOrAttribute<'a>, - lpar_tok: TokenRef<'a>, - mut patterns: Vec>, - pat_comma: Option>, - mut kwds: Vec>, - kwd_comma: Option>, - rpar_tok: TokenRef<'a>, -) -> MatchPattern<'a> { +fn make_class_pattern<'input, 'a>( + cls: NameOrAttribute<'input, 'a>, + lpar_tok: TokenRef<'input, 'a>, + mut patterns: Vec>, + pat_comma: Option>, + mut kwds: Vec>, + kwd_comma: Option>, + rpar_tok: TokenRef<'input, 'a>, +) -> MatchPattern<'input, 'a> { if let Some(c) = pat_comma { if let Some(el) = patterns.pop() { patterns.push(el.with_comma(c)); @@ -3532,25 +3281,20 @@ fn make_class_pattern<'a>( kwds, lpar: Default::default(), rpar: Default::default(), - whitespace_after_cls: Default::default(), - whitespace_before_patterns: Default::default(), - whitespace_after_kwds: Default::default(), lpar_tok, rpar_tok, }) } -fn make_match_keyword_element<'a>( - key: Name<'a>, - equal_tok: TokenRef<'a>, - pattern: MatchPattern<'a>, -) -> MatchKeywordElement<'a> { +fn make_match_keyword_element<'input, 'a>( + key: Name<'input, 'a>, + equal_tok: TokenRef<'input, 'a>, + pattern: MatchPattern<'input, 'a>, +) -> MatchKeywordElement<'input, 'a> { MatchKeywordElement { key, pattern, comma: Default::default(), - whitespace_before_equal: Default::default(), - whitespace_after_equal: Default::default(), equal_tok, } } diff --git a/native/libcst/src/parser/mod.rs b/native/libcst/src/parser/mod.rs index 05937ab3..4e9b4654 100644 --- a/native/libcst/src/parser/mod.rs +++ b/native/libcst/src/parser/mod.rs @@ -8,4 +8,5 @@ mod grammar; mod numbers; pub use errors::ParserError; +pub(crate) use grammar::TokVec; pub use grammar::{python, Result}; diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs index 286a33c7..6d7a0d8e 100644 --- a/native/libcst/src/parser/numbers.rs +++ b/native/libcst/src/parser/numbers.rs @@ -6,7 +6,7 @@ use once_cell::sync::Lazy; use regex::Regex; -use crate::{Expression, Float, Imaginary, Integer}; +use crate::nodes::deflated::{Expression, Float, Imaginary, Integer}; static HEX: &str = r"0[xX](?:_?[0-9a-fA-F])+"; static BIN: &str = r"0[bB](?:_?[01])+"; diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 95bf4d2d..5ce55e09 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -9,3 +9,6 @@ proc-macro = true [dependencies] syn = 
"1.0" quote = "1.0" + +[dev-dependencies] +trybuild = "1.0" diff --git a/native/libcst_derive/src/cstnode.rs b/native/libcst_derive/src/cstnode.rs new file mode 100644 index 00000000..480f954d --- /dev/null +++ b/native/libcst_derive/src/cstnode.rs @@ -0,0 +1,455 @@ +// Copyright (c) Meta Platforms, Inc. and affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + +use proc_macro::TokenStream; +use quote::{format_ident, quote, quote_spanned, ToTokens}; +use syn::{ + self, + parse::{Parse, ParseStream}, + parse_quote, + punctuated::{Pair, Punctuated}, + spanned::Spanned, + token::Comma, + AngleBracketedGenericArguments, Attribute, Data, DataEnum, DataStruct, DeriveInput, Field, + Fields, FieldsNamed, FieldsUnnamed, GenericArgument, Generics, Ident, Meta, MetaList, + NestedMeta, Path, PathArguments, PathSegment, Token, Type, TypePath, Visibility, +}; + +pub(crate) struct CSTNodeParams { + traits: Punctuated, +} + +#[derive(PartialEq, Eq)] +enum SupportedTrait { + ParenthesizedNode, + Codegen, + Inflate, + NoIntoPy, + Default, +} + +pub(crate) fn impl_cst_node(ast: DeriveInput, args: CSTNodeParams) -> TokenStream { + match ast.data { + Data::Enum(e) => impl_enum(args, ast.attrs, ast.vis, ast.ident, ast.generics, e), + Data::Struct(s) => impl_struct(args, ast.attrs, ast.vis, ast.ident, ast.generics, s), + Data::Union(u) => quote_spanned! { + u.union_token.span() => + compile_error!("Union type is not supported") + } + .into(), + } +} + +impl CSTNodeParams { + fn has_trait(&self, treyt: &SupportedTrait) -> bool { + self.traits.iter().any(|x| x == treyt) + } +} + +impl Parse for SupportedTrait { + fn parse(input: ParseStream) -> syn::Result { + if input.peek(Ident) { + let id: Ident = input.parse()?; + return match id.to_string().as_str() { + "ParenthesizedNode" => Ok(Self::ParenthesizedNode), + "Codegen" => Ok(Self::Codegen), + "Inflate" => Ok(Self::Inflate), + "NoIntoPy" => Ok(Self::NoIntoPy), + "Default" => Ok(Self::Default), + _ => Err(input.error("Not a supported trait to derive for cst_node")), + }; + } + Err(input.error("Pass in trait names to be derived")) + } +} + +impl Parse for CSTNodeParams { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + traits: input.parse_terminated(SupportedTrait::parse)?, + }) + } +} + +// enum Foo<'a> { +// Variant(Box>), +// } +// => +// enum Foo<'a> { +// Variant(Box>), +// } +// enum DeflatedFoo<'r, 'a> { +// Variant(Box>), +// } + +fn impl_enum( + args: CSTNodeParams, + mut attrs: Vec, + vis: Visibility, + ident: Ident, + generics: Generics, + mut e: DataEnum, +) -> TokenStream { + let deflated_vis = vis.clone(); + let deflated_ident = format_ident!("Deflated{}", &ident); + let deflated_generics: Generics = parse_quote!(<'r, 'a>); + let mut deflated_variant_tokens = vec![]; + + for var in e.variants.iter_mut() { + let (inflated_fields, deflated_fields) = impl_fields(var.fields.clone()); + var.fields = deflated_fields; + deflated_variant_tokens.push(var.to_token_stream()); + var.fields = inflated_fields; + } + add_inflated_attrs(&args, &mut attrs); + let inflated = DeriveInput { + attrs, + vis, + ident, + generics, + data: Data::Enum(e), + }; + + let deflated_attrs = get_deflated_attrs(&args); + + let gen = quote! 
{ + #[derive(Debug, PartialEq, Eq, Clone)] + #inflated + + #[derive(Debug, PartialEq, Eq, Clone)] + #(#deflated_attrs)* + #deflated_vis enum #deflated_ident#deflated_generics { + #(#deflated_variant_tokens,)* + } + }; + gen.into() +} + +fn get_deflated_attrs(args: &CSTNodeParams) -> Vec { + let mut deflated_attrs: Vec = vec![]; + if args.has_trait(&SupportedTrait::Inflate) { + deflated_attrs.push(parse_quote!(#[derive(Inflate)])); + } + if args.has_trait(&SupportedTrait::ParenthesizedNode) { + deflated_attrs.push(parse_quote!(#[derive(ParenthesizedDeflatedNode)])) + } + if args.has_trait(&SupportedTrait::Default) { + deflated_attrs.push(parse_quote!(#[derive(Default)])); + } + deflated_attrs +} + +fn add_inflated_attrs(args: &CSTNodeParams, attrs: &mut Vec) { + if args.has_trait(&SupportedTrait::Codegen) { + attrs.push(parse_quote!(#[derive(Codegen)])); + } + if args.has_trait(&SupportedTrait::ParenthesizedNode) { + attrs.push(parse_quote!(#[derive(ParenthesizedNode)])); + } + if args.has_trait(&SupportedTrait::Default) { + attrs.push(parse_quote!(#[derive(Default)])); + } + if !args.has_trait(&SupportedTrait::NoIntoPy) { + attrs.push(parse_quote!(#[cfg_attr(feature = "py", derive(TryIntoPy))])); + } +} + +// pub struct Foo<'a> { +// pub bar: Bar<'a>, +// pub value: &'a str, +// pub whitespace_after: SimpleWhitespace<'a>, +// pub(crate) tok: Option, +// } +// => +// pub struct Foo<'a> { +// pub bar: Bar<'a>, +// pub value: &'a str, +// pub whitespace_after: SimpleWhitespace<'a>, +// } +// struct DeflatedFoo<'r, 'a> { +// pub bar: DeflatedBar<'r, 'a>, +// pub value: &'a str, +// pub tok: Option> +// } + +fn impl_struct( + args: CSTNodeParams, + mut attrs: Vec, + vis: Visibility, + ident: Ident, + generics: Generics, + mut s: DataStruct, +) -> TokenStream { + let deflated_vis = vis.clone(); + let deflated_ident = format_ident!("Deflated{}", &ident); + let deflated_generics: Generics = parse_quote!(<'r, 'a>); + + let (inflated_fields, deflated_fields) = impl_fields(s.fields); + s.fields = inflated_fields; + + add_inflated_attrs(&args, &mut attrs); + + let inflated = DeriveInput { + attrs, + vis, + ident, + generics, + data: Data::Struct(s), + }; + + let deflated_attrs = get_deflated_attrs(&args); + + let gen = quote! 
{ + #[derive(Debug, PartialEq, Eq, Clone)] + #inflated + + #[derive(Debug, PartialEq, Eq, Clone)] + #(#deflated_attrs)* + #deflated_vis struct #deflated_ident#deflated_generics + #deflated_fields + + }; + gen.into() +} + +fn impl_fields(fields: Fields) -> (Fields, Fields) { + match &fields { + Fields::Unnamed(fs) => { + let deflated_fields = impl_unnamed_fields(fs.clone()); + (fields, Fields::Unnamed(deflated_fields)) + } + Fields::Named(fs) => impl_named_fields(fs.clone()), + Fields::Unit => (Fields::Unit, Fields::Unit), + } +} + +fn impl_unnamed_fields(mut deflated_fields: FieldsUnnamed) -> FieldsUnnamed { + let mut added_lifetime = false; + deflated_fields.unnamed = deflated_fields + .unnamed + .into_pairs() + .map(|pair| { + let (deflated, lifetime) = make_into_deflated(pair); + added_lifetime |= lifetime; + deflated + }) + .collect(); + + // Make sure all Deflated* types have 'r 'a lifetime params + if !added_lifetime { + deflated_fields.unnamed.push(Field { + vis: Visibility::Inherited, + ty: parse_quote!(std::marker::PhantomData<&'r &'a ()>), + attrs: Default::default(), + colon_token: Default::default(), + ident: Default::default(), + }); + } + deflated_fields +} + +fn impl_named_fields(mut fields: FieldsNamed) -> (Fields, Fields) { + let mut deflated_fields = fields.clone(); + let mut added_lifetime = false; + // Drop whitespace fields from deflated fields + // And add lifetimes to tokenref fields + deflated_fields.named = deflated_fields + .named + .into_pairs() + .filter(|pair| { + let id = pair.value().ident.as_ref().unwrap().to_string(); + !id.contains("whitespace") + && id != "footer" + && id != "header" + && id != "leading_lines" + && id != "lines_after_decorators" + }) + .map(|pair| { + if is_builtin(pair.value()) { + pair + } else { + let (deflated, lifetime) = make_into_deflated(pair); + added_lifetime |= lifetime; + deflated + } + }) + .map(|pair| { + let (mut val, punct) = pair.into_tuple(); + val.attrs = val.attrs.into_iter().filter(is_not_intopy_attr).collect(); + Pair::new(val, punct) + }) + .collect(); + + // Make sure all Deflated* types have 'r 'a lifetime params + if !added_lifetime { + deflated_fields.named.push(Field { + attrs: Default::default(), + vis: Visibility::Inherited, + ident: Some(parse_quote!(_phantom)), + colon_token: Default::default(), + ty: parse_quote!(std::marker::PhantomData<&'r &'a ()>), + }); + } + + // Drop tokenref fields from inflated fields + fields.named = fields + .named + .into_pairs() + .filter(|pair| !is_token_ref(pair.value())) + .collect(); + + (Fields::Named(fields), Fields::Named(deflated_fields)) +} + +fn is_builtin(field: &Field) -> bool { + get_pathseg(&field.ty) + .map(|seg| { + let segstr = seg.ident.to_string(); + segstr == "str" || segstr == "bool" || segstr == "String" + }) + .unwrap_or_default() +} + +fn is_token_ref(field: &Field) -> bool { + if let Some(seg) = rightmost_path_segment(&field.ty) { + return format!("{}", seg.ident) == "TokenRef"; + } + false +} + +// foo::bar -> foo::Deflatedbar<'r, 'a> +fn make_into_deflated(mut pair: Pair) -> (Pair, bool) { + let mut added_lifetime = true; + if let Some(seg) = rightmost_path_segment_mut(&mut pair.value_mut().ty) { + let seg_name = seg.ident.to_string(); + if seg_name != "TokenRef" { + seg.ident = format_ident!("Deflated{}", seg_name); + } + match seg.arguments { + PathArguments::None => { + seg.arguments = PathArguments::AngleBracketed(parse_quote!(<'r, 'a>)); + } + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + ref mut args, .. 
+ }) => { + args.insert(0, parse_quote!('r)); + } + _ => todo!(), + } + } else { + added_lifetime = false; + } + (pair, added_lifetime) +} + +// foo::bar::baz> -> baz> +fn get_pathseg(ty: &Type) -> Option<&PathSegment> { + match ty { + Type::Path(TypePath { path, .. }) => path.segments.last(), + _ => None, + } +} + +// foo::bar::baz> -> quux<'a> +fn rightmost_path_segment(ty: &Type) -> Option<&PathSegment> { + let mut candidate = get_pathseg(ty); + loop { + if let Some(pathseg) = candidate { + if let PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }) = + &pathseg.arguments + { + if let Some(GenericArgument::Type(t)) = args.last() { + candidate = get_pathseg(t); + continue; + } + } + } + break; + } + candidate +} + +fn get_pathseg_mut(ty: &mut Type) -> Option<&mut PathSegment> { + match ty { + Type::Path(TypePath { path, .. }) => path.segments.last_mut(), + _ => None, + } +} + +fn has_more_mut(candidate: &Option<&mut PathSegment>) -> bool { + if let Some(PathArguments::AngleBracketed(AngleBracketedGenericArguments { + ref args, .. + })) = candidate.as_ref().map(|c| &c.arguments) + { + matches!(args.last(), Some(GenericArgument::Type(_))) + } else { + false + } +} + +fn rightmost_path_segment_mut(ty: &mut Type) -> Option<&mut PathSegment> { + let mut candidate = get_pathseg_mut(ty); + + while has_more_mut(&candidate) { + candidate = match candidate.unwrap().arguments { + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + ref mut args, .. + }) => { + if let Some(GenericArgument::Type(t)) = args.last_mut() { + get_pathseg_mut(t) + } else { + unreachable!(); + } + } + _ => unreachable!(), + }; + } + + candidate +} + +fn is_not_intopy_attr(attr: &Attribute) -> bool { + let path = &attr.path; + // support #[cfg_attr(feature="py", skip_py)] + if path.is_ident("cfg_attr") { + match attr.parse_meta() { + Ok(Meta::List(MetaList { nested, .. 
})) => { + for meta in nested { + if let NestedMeta::Meta(Meta::Path(path)) = meta { + return !is_intopy_attr_path(&path); + } + } + } + _ => return false, + } + } + !is_intopy_attr_path(path) +} + +fn is_intopy_attr_path(path: &Path) -> bool { + path.is_ident("skip_py") || path.is_ident("no_py_default") +} + +#[test] +fn trybuild() { + let t = trybuild::TestCases::new(); + t.pass("tests/pass/*.rs"); +} + +#[test] +fn test_is_not_intopy_attr() { + assert!(!is_not_intopy_attr(&parse_quote!(#[skip_py]))); + assert!(!is_not_intopy_attr(&parse_quote!(#[no_py_default]))); + assert!(!is_not_intopy_attr( + &parse_quote!(#[cfg_attr(foo="bar",skip_py)]) + )); + assert!(!is_not_intopy_attr( + &parse_quote!(#[cfg_attr(foo="bar",no_py_default)]) + )); + assert!(is_not_intopy_attr(&parse_quote!(#[skippy]))); + assert!(is_not_intopy_attr( + &parse_quote!(#[cfg_attr(foo="bar",skippy)]) + )); +} diff --git a/native/libcst_derive/src/inflate.rs b/native/libcst_derive/src/inflate.rs index e457b068..9a166bdc 100644 --- a/native/libcst_derive/src/inflate.rs +++ b/native/libcst_derive/src/inflate.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree use proc_macro::TokenStream; -use quote::{quote, quote_spanned}; +use quote::{format_ident, quote, quote_spanned}; use syn::{self, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; pub(crate) fn impl_inflate(ast: &DeriveInput) -> TokenStream { @@ -55,11 +55,19 @@ fn impl_inflate_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { } let ident = &ast.ident; let generics = &ast.generics; + let ident_str = ident.to_string(); + let inflated_ident = format_ident!( + "{}", + ident_str + .strip_prefix("Deflated") + .expect("Cannot implement Inflate on a non-Deflated item") + ); let gen = quote! 
{ - impl<'a> Inflate<'a> for #ident #generics { - fn inflate(mut self, config: & crate::tokenizer::whitespace_parser::Config<'a>) -> std::result::Result { + impl#generics Inflate<'a> for #ident #generics { + type Inflated = #inflated_ident <'a>; + fn inflate(mut self, config: & crate::tokenizer::whitespace_parser::Config<'a>) -> std::result::Result { match self { - #(Self::#varnames(x) => Ok(Self::#varnames(x.inflate(config)?)),)* + #(Self::#varnames(x) => Ok(Self::Inflated::#varnames(x.inflate(config)?)),)* } } } diff --git a/native/libcst_derive/src/lib.rs b/native/libcst_derive/src/lib.rs index 3686eaa2..b7aafeba 100644 --- a/native/libcst_derive/src/lib.rs +++ b/native/libcst_derive/src/lib.rs @@ -11,8 +11,11 @@ mod codegen; use codegen::impl_codegen; mod into_py; use into_py::impl_into_py; +mod cstnode; +use cstnode::{impl_cst_node, CSTNodeParams}; use proc_macro::TokenStream; +use syn::{parse_macro_input, DeriveInput}; #[proc_macro_derive(Inflate)] pub fn inflate_derive(input: TokenStream) -> TokenStream { @@ -22,11 +25,16 @@ pub fn inflate_derive(input: TokenStream) -> TokenStream { #[proc_macro_derive(ParenthesizedNode)] pub fn parenthesized_node_derive(input: TokenStream) -> TokenStream { - impl_parenthesized_node(&syn::parse(input).unwrap()) + impl_parenthesized_node(&syn::parse(input).unwrap(), false) +} + +#[proc_macro_derive(ParenthesizedDeflatedNode)] +pub fn parenthesized_deflated_node_derive(input: TokenStream) -> TokenStream { + impl_parenthesized_node(&syn::parse(input).unwrap(), true) } #[proc_macro_derive(Codegen)] -pub fn parenthesized_node_codegen(input: TokenStream) -> TokenStream { +pub fn codegen_derive(input: TokenStream) -> TokenStream { impl_codegen(&syn::parse(input).unwrap()) } @@ -34,3 +42,9 @@ pub fn parenthesized_node_codegen(input: TokenStream) -> TokenStream { pub fn into_py(input: TokenStream) -> TokenStream { impl_into_py(&syn::parse(input).unwrap()) } + +#[proc_macro_attribute] +pub fn cst_node(args: TokenStream, input: TokenStream) -> TokenStream { + let args = parse_macro_input!(args as CSTNodeParams); + impl_cst_node(parse_macro_input!(input as DeriveInput), args) +} diff --git a/native/libcst_derive/src/parenthesized_node.rs b/native/libcst_derive/src/parenthesized_node.rs index 9165dd1b..edc4b380 100644 --- a/native/libcst_derive/src/parenthesized_node.rs +++ b/native/libcst_derive/src/parenthesized_node.rs @@ -5,12 +5,14 @@ use proc_macro::TokenStream; use quote::{quote, quote_spanned}; -use syn::{spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; +use syn::{ + parse_quote, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed, Ident, +}; -pub(crate) fn impl_parenthesized_node(ast: &DeriveInput) -> TokenStream { +pub(crate) fn impl_parenthesized_node(ast: &DeriveInput, deflated: bool) -> TokenStream { match &ast.data { - Data::Enum(e) => impl_enum(ast, e), - Data::Struct(_) => impl_struct(ast), + Data::Enum(e) => impl_enum(ast, e, deflated), + Data::Struct(_) => impl_struct(ast, deflated), Data::Union(u) => quote_spanned! 
{ u.union_token.span() => compile_error!("Union type is not supported") @@ -19,18 +21,43 @@ pub(crate) fn impl_parenthesized_node(ast: &DeriveInput) -> TokenStream { } } -fn impl_struct(ast: &DeriveInput) -> TokenStream { +fn idents(deflated: bool) -> (Ident, Ident, Ident) { + let treyt: Ident = if deflated { + parse_quote!(ParenthesizedDeflatedNode) + } else { + parse_quote!(ParenthesizedNode) + }; + let leftparen: Ident = if deflated { + parse_quote!(DeflatedLeftParen) + } else { + parse_quote!(LeftParen) + }; + let rightparen: Ident = if deflated { + parse_quote!(DeflatedRightParen) + } else { + parse_quote!(RightParen) + }; + (treyt, leftparen, rightparen) +} + +fn impl_struct(ast: &DeriveInput, deflated: bool) -> TokenStream { let ident = &ast.ident; - let generics = &ast.generics; + let generics = if deflated { + parse_quote!(<'r, 'a>) + } else { + ast.generics.clone() + }; + + let (treyt, leftparen, rightparen) = idents(deflated); let gen = quote! { - impl<'a> ParenthesizedNode<'a> for #ident #generics { - fn lpar(&self) -> &Vec> { + impl#generics #treyt#generics for #ident #generics { + fn lpar(&self) -> &Vec<#leftparen#generics> { &self.lpar } - fn rpar(&self) -> &Vec> { + fn rpar(&self) -> &Vec<#rightparen#generics> { &self.rpar } - fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + fn with_parens(self, left: #leftparen#generics, right: #rightparen#generics) -> Self { let mut lpar = self.lpar; let mut rpar = self.rpar; lpar.insert(0, left); @@ -43,7 +70,7 @@ fn impl_struct(ast: &DeriveInput) -> TokenStream { gen.into() } -fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { +fn impl_enum(ast: &DeriveInput, e: &DataEnum, deflated: bool) -> TokenStream { let mut varnames = vec![]; for var in e.variants.iter() { match &var.fields { @@ -74,20 +101,25 @@ fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { } } let ident = &ast.ident; - let generics = &ast.generics; + let generics = if deflated { + parse_quote!(<'r, 'a>) + } else { + ast.generics.clone() + }; + let (treyt, leftparen, rightparen) = idents(deflated); let gen = quote! { - impl<'a> ParenthesizedNode<'a> for #ident #generics { - fn lpar(&self) -> &Vec> { + impl#generics #treyt#generics for #ident #generics { + fn lpar(&self) -> &Vec<#leftparen#generics> { match self { #(Self::#varnames(x) => x.lpar(),)* } } - fn rpar(&self) -> &Vec> { + fn rpar(&self) -> &Vec<#rightparen#generics> { match self { #(Self::#varnames(x) => x.rpar(),)* } } - fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { + fn with_parens(self, left: #leftparen#generics, right: #rightparen#generics) -> Self { match self { #(Self::#varnames(x) => Self::#varnames(x.with_parens(left, right)),)* } diff --git a/native/libcst_derive/tests/pass/minimal_cst.rs b/native/libcst_derive/tests/pass/minimal_cst.rs new file mode 100644 index 00000000..104b2e11 --- /dev/null +++ b/native/libcst_derive/tests/pass/minimal_cst.rs @@ -0,0 +1,127 @@ +// Copyright (c) Meta Platforms, Inc. and affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + +use libcst_derive::{cst_node, Codegen}; + +pub enum Error {} + +type TokenRef<'r, 'a> = &'r &'a str; +pub type Result = std::result::Result; + +pub struct Config<'a> { + #[allow(dead_code)] + foo: &'a str, +} +pub trait Inflate<'a> +where + Self: Sized, +{ + type Inflated; + fn inflate(self, config: &Config<'a>) -> Result; +} + +impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box { + type Inflated = Box; + fn inflate(self, config: &Config<'a>) -> Result { + match (*self).inflate(config) { + Ok(a) => Ok(Box::new(a)), + Err(e) => Err(e), + } + } +} + +pub struct CodegenState<'a> { + #[allow(dead_code)] + foo: &'a str, +} +pub trait Codegen<'a> { + fn codegen(&self, state: &mut CodegenState<'a>); +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct WS<'a> { + pub last_line: &'a str, +} + +#[cst_node] +pub struct Parameters<'a> { + pub params: Vec>, + pub foo: Param<'a>, +} + +impl<'r, 'a> Inflate<'a> for DeflatedParameters<'r, 'a> { + type Inflated = Parameters<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let params = vec![]; + #[allow(clippy::blacklisted_name)] + let foo = self.foo.inflate(config)?; + Ok(Self::Inflated { params, foo }) + } +} + +#[cst_node] +pub struct Param<'a> { + pub star: Option<&'a str>, + pub(crate) star_tok: Option>, +} + +impl<'r, 'a> Inflate<'a> for DeflatedParam<'r, 'a> { + type Inflated = Param<'a>; + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(Self::Inflated { star: self.star }) + } +} + +impl<'a> Codegen<'a> for Param<'a> { + fn codegen(&self, _state: &mut CodegenState<'a>) {} +} + +#[cst_node] +pub struct BitOr<'a> { + pub whitespace_before: WS<'a>, + pub whitespace_after: WS<'a>, + + pub(crate) tok: TokenRef<'a>, +} + +#[cst_node] +pub enum CompOp<'a> { + LessThan { + whitespace_before: WS<'a>, + tok: TokenRef<'a>, + }, + GreaterThan { + whitespace_after: WS<'a>, + tok: TokenRef<'a>, + }, +} + +impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { + type Inflated = CompOp<'a>; + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(match self { + Self::LessThan { tok: _, .. } => Self::Inflated::LessThan { + whitespace_before: WS { last_line: "yo" }, + }, + Self::GreaterThan { tok: _, .. } => Self::Inflated::GreaterThan { + whitespace_after: WS { last_line: "" }, + }, + }) + } +} + +impl<'a> Codegen<'a> for CompOp<'a> { + fn codegen(&self, _state: &mut CodegenState<'a>) {} +} + +#[cst_node(Codegen)] +enum Expr<'a> { + #[allow(dead_code)] + One(Box>), + #[allow(dead_code)] + Two(CompOp<'a>), +} + +fn main() {} diff --git a/native/libcst_derive/tests/pass/simple.rs b/native/libcst_derive/tests/pass/simple.rs new file mode 100644 index 00000000..838f3914 --- /dev/null +++ b/native/libcst_derive/tests/pass/simple.rs @@ -0,0 +1,54 @@ +// Copyright (c) Meta Platforms, Inc. and affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree + +use libcst_derive::cst_node; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct WS<'a>(&'a str); + +type TokenRef<'r, 'a> = &'r &'a str; + +#[cst_node] +pub enum Foo<'a> { + One(One<'a>), + Two(Box>), +} + +#[cst_node] +pub struct One<'a> { + pub two: Box>, + pub header: WS<'a>, + + pub(crate) newline_tok: TokenRef<'a>, +} + +#[cst_node] +pub struct Two<'a> { + pub whitespace_before: WS<'a>, + pub(crate) tok: TokenRef<'a>, +} + +#[cst_node] +struct Thin<'a> { + pub whitespace: WS<'a>, +} + +#[cst_node] +struct Value<'a> { + pub value: &'a str, +} + +#[cst_node] +struct Empty {} + +#[cst_node] +enum Smol<'a> { + #[allow(dead_code)] + Thin(Thin<'a>), + #[allow(dead_code)] + Empty(Empty), +} + +fn main() {} From ebe1851c2b64beb151e7df24bf8cfe4bb20a3dae Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 13 Jun 2022 16:52:31 +0100 Subject: [PATCH 252/632] Add support for PEP-646 (#696) --- libcst/_nodes/expression.py | 23 ++++++- libcst/_nodes/tests/base.py | 2 +- libcst/_nodes/tests/test_funcdef.py | 94 ++++++++++++++++++++++++++ libcst/_typed_visitor.py | 18 ++++- libcst/matchers/__init__.py | 42 +++++++++++- libcst/matchers/_return_types.py | 2 +- native/libcst/src/nodes/expression.rs | 27 +++++++- native/libcst/src/parser/grammar.rs | 33 ++++++++- native/libcst/tests/fixtures/pep646.py | 37 ++++++++++ 9 files changed, 269 insertions(+), 9 deletions(-) create mode 100644 native/libcst/tests/fixtures/pep646.py diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index b65eb50c..89a9d806 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -1438,10 +1438,29 @@ class Index(BaseSlice): #: The index value itself. value: BaseExpression + #: An optional string with an asterisk appearing before the name. This is + #: expanded into variable number of positional arguments. See PEP-646 + star: Optional[Literal["*"]] = None + + #: Whitespace after the ``star`` (if it exists), but before the ``value``. + whitespace_after_star: Optional[BaseParenthesizableWhitespace] = None + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Index": - return Index(value=visit_required(self, "value", self.value, visitor)) + return Index( + star=self.star, + whitespace_after_star=visit_optional( + self, "whitespace_after_star", self.whitespace_after_star, visitor + ), + value=visit_required(self, "value", self.value, visitor), + ) def _codegen_impl(self, state: CodegenState) -> None: + star = self.star + if star is not None: + state.add_token(star) + ws = self.whitespace_after_star + if ws is not None: + ws._codegen(state) self.value._codegen(state) @@ -2785,7 +2804,7 @@ class DictElement(BaseDictElement): @add_slots @dataclass(frozen=True) -class StarredElement(BaseElement, _BaseParenthesizedNode): +class StarredElement(BaseElement, BaseExpression, _BaseParenthesizedNode): """ A starred ``*value`` element that expands to represent multiple values in a literal :class:`List`, :class:`Tuple`, or :class:`Set`. 
diff --git a/libcst/_nodes/tests/base.py b/libcst/_nodes/tests/base.py index 8245e301..65e7059d 100644 --- a/libcst/_nodes/tests/base.py +++ b/libcst/_nodes/tests/base.py @@ -239,7 +239,7 @@ class CSTNodeTest(UnitTest): def assert_parses( self, code: str, - parser: Callable[[str], cst.BaseExpression], + parser: Callable[[str], cst.CSTNode], expect_success: bool, ) -> None: if not expect_success: diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 5d5a80ac..7dedb6b8 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -701,6 +701,81 @@ class FunctionDefCreationTest(CSTNodeTest): ) ) def test_valid(self, **kwargs: Any) -> None: + if not is_native() and kwargs.get("native_only", False): + self.skipTest("Disabled for native parser") + if "native_only" in kwargs: + kwargs.pop("native_only") + self.validate_node(**kwargs) + + @data_provider( + ( + # PEP 646 + { + "node": cst.FunctionDef( + name=cst.Name(value="foo"), + params=cst.Parameters( + params=[], + star_arg=cst.Param( + star="*", + name=cst.Name("a"), + annotation=cst.Annotation( + cst.StarredElement(value=cst.Name("b")), + whitespace_before_indicator=cst.SimpleWhitespace(""), + ), + ), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + "parser": parse_statement, + "code": "def foo(*a: *b): pass\n", + }, + { + "node": cst.FunctionDef( + name=cst.Name(value="foo"), + params=cst.Parameters( + params=[], + star_arg=cst.Param( + star="*", + name=cst.Name("a"), + annotation=cst.Annotation( + cst.StarredElement( + value=cst.Subscript( + value=cst.Name("tuple"), + slice=[ + cst.SubscriptElement( + cst.Index(cst.Name("int")), + comma=cst.Comma(), + ), + cst.SubscriptElement( + cst.Index( + value=cst.Name("Ts"), + star="*", + whitespace_after_star=cst.SimpleWhitespace( + "" + ), + ), + comma=cst.Comma(), + ), + cst.SubscriptElement( + cst.Index(cst.Ellipsis()) + ), + ], + ) + ), + whitespace_before_indicator=cst.SimpleWhitespace(""), + ), + ), + ), + body=cst.SimpleStatementSuite((cst.Pass(),)), + ), + "parser": parse_statement, + "code": "def foo(*a: *tuple[int,*Ts,...]): pass\n", + }, + ) + ) + def test_valid_native(self, **kwargs: Any) -> None: + if not is_native(): + self.skipTest("Disabled for native parser") self.validate_node(**kwargs) @data_provider( @@ -2045,3 +2120,22 @@ class FunctionDefParserTest(CSTNodeTest): if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) + + @data_provider( + ( + {"code": "A[:*b]"}, + {"code": "A[*b:]"}, + {"code": "A[*b:*b]"}, + {"code": "A[*(1:2)]"}, + {"code": "A[*:]"}, + {"code": "A[:*]"}, + {"code": "A[**b]"}, + {"code": "def f(x: *b): pass"}, + {"code": "def f(**x: *b): pass"}, + {"code": "x: *b"}, + ) + ) + def test_parse_error(self, **kwargs: Any) -> None: + if not is_native(): + self.skipTest("Skipped for non-native parser") + self.assert_parses(**kwargs, expect_success=False, parser=parse_statement) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index a880bee4..8eae0d37 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -2807,6 +2807,22 @@ class CSTTypedBaseFunctions: def leave_Index_value(self, node: "Index") -> None: pass + @mark_no_op + def visit_Index_star(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_star(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Index_whitespace_after_star(self, node: "Index") -> None: + pass + + @mark_no_op 
+ def leave_Index_whitespace_after_star(self, node: "Index") -> None: + pass + @mark_no_op def visit_Integer(self, node: "Integer") -> Optional[bool]: pass @@ -7056,7 +7072,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_StarredElement( self, original_node: "StarredElement", updated_node: "StarredElement" - ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: + ) -> "BaseExpression": return updated_node @mark_no_op diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 9602de41..be59a4c9 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -7364,6 +7364,46 @@ class Index(BaseSlice, BaseMatcherNode): OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType], ] = DoNotCare() + star: Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + ] + ], + AllOf[ + Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + ] + ], + ] = DoNotCare() + whitespace_after_star: Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + ] + ], + AllOf[ + Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + ] + ], + ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -13644,7 +13684,7 @@ class StarredDictElement(BaseDictElement, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) -class StarredElement(BaseElement, BaseMatcherNode): +class StarredElement(BaseElement, BaseExpression, BaseMatcherNode): value: Union[ BaseExpressionMatchType, DoNotCareSentinel, diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index bc8e9042..57162632 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -346,7 +346,7 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { SimpleWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], Slice: BaseSlice, StarredDictElement: Union[BaseDictElement, RemovalSentinel], - StarredElement: Union[BaseElement, RemovalSentinel], + StarredElement: BaseExpression, Subscript: BaseExpression, SubscriptElement: Union[SubscriptElement, RemovalSentinel], Subtract: BaseBinaryOp, diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 261bdd67..15abe99b 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -1787,18 +1787,41 @@ pub enum BaseSlice<'a> { #[cst_node] pub struct Index<'a> { pub value: Expression<'a>, + pub star: Option<&'a str>, + pub whitespace_after_star: Option>, + + pub(crate) star_tok: Option>, } impl<'r, 'a> Inflate<'a> for DeflatedIndex<'r, 'a> { type Inflated = Index<'a>; - fn inflate(self, config: &Config<'a>) -> Result { + fn inflate(mut self, config: &Config<'a>) -> Result { + let (star, whitespace_after_star) = if let Some(star_tok) = self.star_tok.as_mut() { + ( + Some(star_tok.string), + Some(parse_parenthesizable_whitespace( + config, + &mut star_tok.whitespace_after.borrow_mut(), + )?), + ) + } else { + (None, None) + }; let value = 
self.value.inflate(config)?; - Ok(Self::Inflated { value }) + Ok(Self::Inflated { + value, + star, + whitespace_after_star, + }) } } impl<'a> Codegen<'a> for Index<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { + if let Some(star) = self.star { + state.add_token(star); + } + self.whitespace_after_star.codegen(state); self.value.codegen(state); } } diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 25fa225f..94ee24a9 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -387,6 +387,10 @@ parser! { StarEtc(Some(StarArg::Param(Box::new( add_param_star(a, star)))), b, kw) } + / star:lit("*") a:param_no_default_star_annotation() b:param_maybe_default()* kw:kwds()? { + StarEtc(Some(StarArg::Param(Box::new( + add_param_star(a, star)))), b, kw) + } / lit("*") c:comma() b:param_maybe_default()+ kw:kwds()? { StarEtc(Some(StarArg::Star(Box::new(ParamStar {comma:c }))), b, kw) } @@ -401,6 +405,10 @@ parser! { = a:param() c:lit(",") { add_param_default(a, None, Some(c)) } / a:param() &lit(")") {a} + rule param_no_default_star_annotation() -> Param<'input, 'a> + = a:param_star_annotation() c:lit(",") { add_param_default(a, None, Some(c))} + / a:param_star_annotation() &lit(")") {a} + rule param_with_default() -> Param<'input, 'a> = a:param() def:default() c:lit(",") { add_param_default(a, Some(def), Some(c)) @@ -422,11 +430,21 @@ parser! { Param {name: n, annotation: a, ..Default::default() } } + rule param_star_annotation() -> Param<'input, 'a> + = n:name() a:star_annotation() { + Param {name: n, annotation: Some(a), ..Default::default() } + } + rule annotation() -> Annotation<'input, 'a> = col:lit(":") e:expression() { make_annotation(col, e) } + rule star_annotation() -> Annotation<'input, 'a> + = col:lit(":") e:star_expression() { + make_annotation(col, e) + } + rule default() -> (AssignEqual<'input, 'a>, Expression<'input, 'a>) = eq:lit("=") ex:expression() { (make_assign_equal(eq), ex) @@ -983,6 +1001,7 @@ parser! { rest:(c:lit(":") s:expression()? {(c, s)})? { make_slice(l, col, u, rest) } + / e:starred_expression() { make_index_from_arg(e) } / v:expression() { make_index(v) } rule atom() -> Expression<'input, 'a> @@ -2412,7 +2431,19 @@ fn make_double_starred_element<'input, 'a>( } fn make_index<'input, 'a>(value: Expression<'input, 'a>) -> BaseSlice<'input, 'a> { - BaseSlice::Index(Box::new(Index { value })) + BaseSlice::Index(Box::new(Index { + value, + star: None, + star_tok: None, + })) +} + +fn make_index_from_arg<'input, 'a>(arg: Arg<'input, 'a>) -> BaseSlice<'input, 'a> { + BaseSlice::Index(Box::new(Index { + value: arg.value, + star: Some(arg.star), + star_tok: arg.star_tok, + })) } fn make_colon<'input, 'a>(tok: TokenRef<'input, 'a>) -> Colon<'input, 'a> { diff --git a/native/libcst/tests/fixtures/pep646.py b/native/libcst/tests/fixtures/pep646.py new file mode 100644 index 00000000..6af0e6f1 --- /dev/null +++ b/native/libcst/tests/fixtures/pep646.py @@ -0,0 +1,37 @@ +# see https://github.com/python/cpython/pull/31018/files#diff-3f516b60719dd445d33225e4f316b36e85c9c51a843a0147349d11a005c55937 + +A[*b] +A[ * b ] +A[ * b , ] +A[*b] = 1 +del A[*b] + +A[* b , * b] +A[ b, *b] +A[* b, b] +A[ * b,b, b] +A[b, *b, b] + +A[*A[b, *b, b], b] +A[b, ...] 
+A[*A[b, ...]] + +A[ * ( 1,2,3)] +A[ * [ 1,2,3]] + +A[1:2, *t] +A[1:, *t, 1:2] +A[:, *t, :] +A[*t, :, *t] + +A[* returns_list()] +A[*returns_list(), * returns_list(), b] + +def f1(*args: *b): pass +def f2(*args: *b, arg1): pass +def f3(*args: *b, arg1: int): pass +def f4(*args: *b, arg1: int = 1): pass + +def f(*args: *tuple[int, ...]): pass +def f(*args: *tuple[int, *Ts]): pass +def f() -> tuple[int, *tuple[int, ...]]: pass \ No newline at end of file From 66676aaeecd0e6b8e7c48764f03da67f249481fd Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 13 Jun 2022 18:27:07 +0100 Subject: [PATCH 253/632] upload to prod pypi from workflow (#697) --- .github/workflows/build.yml | 271 ++---------------------------- .github/workflows/ci.yml | 203 ++++++++++++++++++++++ .github/workflows/pypi_upload.yml | 63 +++++++ 3 files changed, 281 insertions(+), 256 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/pypi_upload.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d4167652..530b39ad 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,154 +1,9 @@ -name: Python CI - +name: build on: - push: - branches: - - main - pull_request: + workflow_call: jobs: -# Run unittests - test: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.7, 3.8, 3.9, "3.10"] - parser: [pure, native] - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - if: ${{ matrix.parser == 'native' }} - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - - if: ${{ matrix.parser == 'native' }} - name: Rust Cache - uses: Swatinem/rust-cache@v1.3.0 - with: - working-directory: native - - run: >- - echo LIBCST_PARSER_TYPE=${{ matrix.parser }} >> $GITHUB_ENV - - name: Run Tests - run: python setup.py test - -# Run linters - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - with: - python-version: "3.10" - - uses: actions/cache@v2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - run: flake8 - - run: ufmt check . 
- - run: python3 -m fixit.cli.run_rules - - run: python -m slotscheck libcst - - run: ./check_copyright.sh - -# Run pyre typechecker - typecheck: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - with: - python-version: "3.10" - - uses: actions/cache@v2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - name: Make sure Pyre uses the working copy - run: pip install -e . - - run: pyre --version - - run: pyre -n check - - run: python libcst/tests/test_pyre_integration.py - - run: git diff --exit-code - -# Upload test coverage - coverage: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - with: - python-version: "3.10" - - uses: actions/cache@v2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - name: Generate Coverage - run: | - coverage run setup.py test - coverage xml -i - - uses: codecov/codecov-action@v2 - with: - files: coverage.xml - fail_ci_if_error: true - verbose: true - - name: Archive Coverage - uses: actions/upload-artifact@v2 - with: - name: coverage - path: coverage.xml - -# Build the docs - docs: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - with: - python-version: "3.10" - - uses: actions/cache@v2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - uses: ts-graphviz/setup-graphviz@v1 - - run: sphinx-build docs/source/ docs/build/ - - name: Archive Docs - uses: actions/upload-artifact@v2 - with: - name: sphinx-docs - path: docs/build - -# Build python wheels + # Build python wheels build_matrix: name: Prepare job matrix for build job runs-on: ubuntu-latest @@ -183,126 +38,30 @@ jobs: CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" CIBW_ARCHS: ${{ matrix.vers }} CIBW_BUILD_VERBOSITY: 1 - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v2 - if: ${{ !contains(matrix.os, 'self-hosted') }} - with: - python-version: "3.10" - - uses: actions/cache@v2 - id: cache - if: ${{ !contains(matrix.os, 'self-hosted') }} - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Rust Cache - if: ${{ !contains(matrix.os, 'self-hosted') }} - uses: Swatinem/rust-cache@v1.3.0 - with: - working-directory: native - - name: Disable scmtools local scheme - if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} - run: >- - echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - - name: Build wheels - uses: pypa/cibuildwheel@v2.3.1 - - uses: actions/upload-artifact@v2 - with: - path: wheelhouse/*.whl - name: wheels - - pypi: - 
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} - name: Upload wheels to pypi - runs-on: ubuntu-latest - needs: build steps: - uses: actions/checkout@v1 - - name: Download binary wheels - id: download - uses: actions/download-artifact@v2 - with: - name: wheels - path: wheelhouse - uses: actions/setup-python@v2 + if: ${{ !contains(matrix.os, 'self-hosted') }} with: python-version: "3.10" - uses: actions/cache@v2 id: cache + if: ${{ !contains(matrix.os, 'self-hosted') }} with: path: ${{ env.pythonLocation }} key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - name: Disable scmtools local scheme - run: >- - echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - - name: Build a source tarball - run: >- - python -m - build - --sdist - --outdir ${{ steps.download.outputs.download-path }} - - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository_url: https://test.pypi.org/legacy/ - packages_dir: ${{ steps.download.outputs.download-path }} - -# Test rust parts - native: - name: Rust unit tests - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - components: rustfmt, clippy - name: Rust Cache + if: ${{ !contains(matrix.os, 'self-hosted') }} uses: Swatinem/rust-cache@v1.3.0 with: working-directory: native - - uses: actions/setup-python@v2 + - name: Disable scmtools local scheme + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + run: >- + echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Build wheels + uses: pypa/cibuildwheel@v2.3.1 + - uses: actions/upload-artifact@v2 with: - python-version: "3.10" - - name: test - uses: actions-rs/cargo@v1 - with: - command: test - args: --manifest-path=native/Cargo.toml --release - - name: test without python - if: matrix.os == 'ubuntu-latest' - uses: actions-rs/cargo@v1 - with: - command: test - args: --manifest-path=native/Cargo.toml --release --no-default-features - - name: clippy - uses: actions-rs/clippy-check@v1 - with: - token: ${{ secrets.GITHUB_TOKEN }} - args: --manifest-path=native/Cargo.toml --all-features - - rustfmt: - name: Rustfmt - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: stable - override: true - - run: rustup component add rustfmt - - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --all --manifest-path=native/Cargo.toml -- --check + path: wheelhouse/*.whl + name: wheels diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..3608bbbc --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,203 @@ +name: CI + +on: + push: + branches: + - main + pull_request: + +jobs: + # Run unittests + test: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.7, 3.8, 3.9, "3.10"] + parser: [pure, native] + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: 
actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - if: ${{ matrix.parser == 'native' }} + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + - if: ${{ matrix.parser == 'native' }} + name: Rust Cache + uses: Swatinem/rust-cache@v1.3.0 + with: + working-directory: native + - run: >- + echo LIBCST_PARSER_TYPE=${{ matrix.parser }} >> $GITHUB_ENV + - name: Run Tests + run: python setup.py test + + # Run linters + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - run: flake8 + - run: ufmt check . + - run: python3 -m fixit.cli.run_rules + - run: python -m slotscheck libcst + - run: ./check_copyright.sh + + # Run pyre typechecker + typecheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Make sure Pyre uses the working copy + run: pip install -e . 
+ - run: pyre --version + - run: pyre -n check + - run: python libcst/tests/test_pyre_integration.py + - run: git diff --exit-code + + # Upload test coverage + coverage: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Generate Coverage + run: | + coverage run setup.py test + coverage xml -i + - uses: codecov/codecov-action@v2 + with: + files: coverage.xml + fail_ci_if_error: true + verbose: true + - name: Archive Coverage + uses: actions/upload-artifact@v2 + with: + name: coverage + path: coverage.xml + + # Build the docs + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - uses: ts-graphviz/setup-graphviz@v1 + - run: sphinx-build docs/source/ docs/build/ + - name: Archive Docs + uses: actions/upload-artifact@v2 + with: + name: sphinx-docs + path: docs/build + + # Test rust parts + native: + name: Rust unit tests + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + toolchain: stable + components: rustfmt, clippy + - name: Rust Cache + uses: Swatinem/rust-cache@v1.3.0 + with: + working-directory: native + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: test + uses: actions-rs/cargo@v1 + with: + command: test + args: --manifest-path=native/Cargo.toml --release + - name: test without python + if: matrix.os == 'ubuntu-latest' + uses: actions-rs/cargo@v1 + with: + command: test + args: --manifest-path=native/Cargo.toml --release --no-default-features + - name: clippy + uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: --manifest-path=native/Cargo.toml --all-features + + rustfmt: + name: Rustfmt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + - run: rustup component add rustfmt + - uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all --manifest-path=native/Cargo.toml -- --check diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml new file mode 100644 index 00000000..bbc491de --- /dev/null +++ b/.github/workflows/pypi_upload.yml @@ -0,0 +1,63 @@ +name: pypi_upload + +on: + release: + types: [published] + push: + branches: [main] + +permissions: + contents: read + +jobs: + build: + uses: Instagram/LibCST/.github/workflows/build.yml@main + upload_release: + name: Upload wheels to pypi + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v1 + - name: Download binary wheels + id: download + uses: 
actions/download-artifact@v2 + with: + name: wheels + path: wheelhouse + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Disable scmtools local scheme + run: >- + echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Build a source tarball + run: >- + python -m + build + --sdist + --outdir ${{ steps.download.outputs.download-path }} + - name: Publish distribution 📦 to Test PyPI + if: github.event_name == 'push' + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + packages_dir: ${{ steps.download.outputs.download-path }} + - name: Publish distribution 📦 to PyPI + if: github.event_name == 'release' + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + repository_url: https://pypi.org/legacy/ + packages_dir: ${{ steps.download.outputs.download-path }} From 7a6fa534fc7a61f509ff8085a00aa13eabc72108 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 13 Jun 2022 19:06:28 +0100 Subject: [PATCH 254/632] bump version to 0.4.4 --- CHANGELOG.md | 644 ++++++++++++++++++++++++++++----------------------- 1 file changed, 359 insertions(+), 285 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8cf7863e..87779ccd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,73 +1,94 @@ +# 0.4.4 - 2022-06-13 + +## New Contributors + +- @adamchainz made their first contribution in https://github.com/Instagram/LibCST/pull/688 + +## Added + +- Add package links to PyPI by @adamchainz in https://github.com/Instagram/LibCST/pull/688 +- native: add overall benchmark by @zsol in https://github.com/Instagram/LibCST/pull/692 +- Add support for PEP-646 by @zsol in https://github.com/Instagram/LibCST/pull/696 + +## Updated + +- parser: use references instead of smart pointers for Tokens by @zsol in https://github.com/Instagram/LibCST/pull/691 + # 0.4.3 - 2022-05-11 ## Fixed -* Restore the 0.4.1 behavior for libcst.helpers.get_absolute_module by @lpetre in https://github.com/Instagram/LibCST/pull/684 +- Restore the 0.4.1 behavior for libcst.helpers.get_absolute_module by @lpetre in https://github.com/Instagram/LibCST/pull/684 # 0.4.2 - 2022-05-04 ## New Contributors -* @stanislavlevin made their first contribution in https://github.com/Instagram/LibCST/pull/650 -* @dmitryvinn made their first contribution in https://github.com/Instagram/LibCST/pull/655 -* @wiyr made their first contribution in https://github.com/Instagram/LibCST/pull/669 -* @toofar made their first contribution in https://github.com/Instagram/LibCST/pull/675 + +- @stanislavlevin made their first contribution in https://github.com/Instagram/LibCST/pull/650 +- @dmitryvinn made their first contribution in https://github.com/Instagram/LibCST/pull/655 +- @wiyr made their first contribution in https://github.com/Instagram/LibCST/pull/669 +- @toofar made their first contribution in https://github.com/Instagram/LibCST/pull/675 ## Fixed -* native: Avoid crashing by making IntoPy conversion fallible by @zsol in https://github.com/Instagram/LibCST/pull/639 -* native: make sure 
ParserError's line is zero-indexed by @zsol in https://github.com/Instagram/LibCST/pull/681 -* Fix space validation for AsName and Await by @zsol in https://github.com/Instagram/LibCST/pull/641 -* Qualified Name Provider: Fix returned qname for symbols that are prefixes of each other by @wiyr in https://github.com/Instagram/LibCST/pull/669 -* Rename Codemod: Correct last renamed import from by @toofar in https://github.com/Instagram/LibCST/pull/675 -* Many changes to the Apply Type Comments codemod: - * Allow for skipping quotes when applying type comments by @stroxler in https://github.com/Instagram/LibCST/pull/644 - * Port pyre fixes by @stroxler in https://github.com/Instagram/LibCST/pull/651 - * Preserve as-imports when merging type annotations. by @martindemello in https://github.com/Instagram/LibCST/pull/664 - * Qualify imported symbols when the dequalified form would cause a conflict by @martindemello in https://github.com/Instagram/LibCST/pull/674 - * Add an argument to always qualify imported type annotations. by @martindemello in https://github.com/Instagram/LibCST/pull/676 + +- native: Avoid crashing by making IntoPy conversion fallible by @zsol in https://github.com/Instagram/LibCST/pull/639 +- native: make sure ParserError's line is zero-indexed by @zsol in https://github.com/Instagram/LibCST/pull/681 +- Fix space validation for AsName and Await by @zsol in https://github.com/Instagram/LibCST/pull/641 +- Qualified Name Provider: Fix returned qname for symbols that are prefixes of each other by @wiyr in https://github.com/Instagram/LibCST/pull/669 +- Rename Codemod: Correct last renamed import from by @toofar in https://github.com/Instagram/LibCST/pull/675 +- Many changes to the Apply Type Comments codemod: + - Allow for skipping quotes when applying type comments by @stroxler in https://github.com/Instagram/LibCST/pull/644 + - Port pyre fixes by @stroxler in https://github.com/Instagram/LibCST/pull/651 + - Preserve as-imports when merging type annotations. by @martindemello in https://github.com/Instagram/LibCST/pull/664 + - Qualify imported symbols when the dequalified form would cause a conflict by @martindemello in https://github.com/Instagram/LibCST/pull/674 + - Add an argument to always qualify imported type annotations. 
by @martindemello in https://github.com/Instagram/LibCST/pull/676 ## Added -* Create an AddTrailingCommas codemod by @stroxler in https://github.com/Instagram/LibCST/pull/643 -* Define gather global names visitor by @shannonzhu in https://github.com/Instagram/LibCST/pull/657 +- Create an AddTrailingCommas codemod by @stroxler in https://github.com/Instagram/LibCST/pull/643 +- Define gather global names visitor by @shannonzhu in https://github.com/Instagram/LibCST/pull/657 ## Updated -* Support module and package names in the codemod context by @lpetre in https://github.com/Instagram/LibCST/pull/662 -* Drop support for running libcst using a python 3.6 interpreter by @lpetre in https://github.com/Instagram/LibCST/pull/663 -* Update relative import logic to match cpython by @lpetre in https://github.com/Instagram/LibCST/pull/660 -* Scope Provider: Consider access information when computing qualified names for nodes by @lpetre in https://github.com/Instagram/LibCST/pull/682 - +- Support module and package names in the codemod context by @lpetre in https://github.com/Instagram/LibCST/pull/662 +- Drop support for running libcst using a python 3.6 interpreter by @lpetre in https://github.com/Instagram/LibCST/pull/663 +- Update relative import logic to match cpython by @lpetre in https://github.com/Instagram/LibCST/pull/660 +- Scope Provider: Consider access information when computing qualified names for nodes by @lpetre in https://github.com/Instagram/LibCST/pull/682 # 0.4.1 - 2022-01-28 ## New Contributors -* @ariebovenberg made their first contribution in https://github.com/Instagram/LibCST/pull/605 -* @sehz made their first contribution in https://github.com/Instagram/LibCST/pull/598 + +- @ariebovenberg made their first contribution in https://github.com/Instagram/LibCST/pull/605 +- @sehz made their first contribution in https://github.com/Instagram/LibCST/pull/598 ## Added -* Add docs about the native parts by @zsol in https://github.com/Instagram/LibCST/pull/601 -* Specify minimum rust toolchain version by @zsol in https://github.com/Instagram/LibCST/pull/614 -* build wheels on main branch for linux/arm64 by @zsol in https://github.com/Instagram/LibCST/pull/630 + +- Add docs about the native parts by @zsol in https://github.com/Instagram/LibCST/pull/601 +- Specify minimum rust toolchain version by @zsol in https://github.com/Instagram/LibCST/pull/614 +- build wheels on main branch for linux/arm64 by @zsol in https://github.com/Instagram/LibCST/pull/630 ## Updated -* ApplyTypeAnnotationVisitor changes - * Add support for methods with func type comment excluding self/cls by @stroxler in https://github.com/Instagram/LibCST/pull/622 - * Merge in TypeVars and Generic base classes in ApplyTypeAnnotationVisitor by @martindemello in https://github.com/Instagram/LibCST/pull/596 - * Full handling for applying type comments to Assign by @stroxler in https://github.com/Instagram/LibCST/pull/599 - * Add support for For and With by @stroxler in https://github.com/Instagram/LibCST/pull/607 - * Support FunctionDef transformations by @stroxler in https://github.com/Instagram/LibCST/pull/610 -* change pyo3 as optional dependency in native Python Parser by @sehz in https://github.com/Instagram/LibCST/pull/598 -* add slots to base classes, @add_slots takes bases into account by @ariebovenberg in https://github.com/Instagram/LibCST/pull/605 -* [native] Box most enums by @zsol in https://github.com/Instagram/LibCST/pull/632 -* [native] Return tuples instead of lists in CST nodes by @zsol in 
https://github.com/Instagram/LibCST/pull/631 + +- ApplyTypeAnnotationVisitor changes + - Add support for methods with func type comment excluding self/cls by @stroxler in https://github.com/Instagram/LibCST/pull/622 + - Merge in TypeVars and Generic base classes in ApplyTypeAnnotationVisitor by @martindemello in https://github.com/Instagram/LibCST/pull/596 + - Full handling for applying type comments to Assign by @stroxler in https://github.com/Instagram/LibCST/pull/599 + - Add support for For and With by @stroxler in https://github.com/Instagram/LibCST/pull/607 + - Support FunctionDef transformations by @stroxler in https://github.com/Instagram/LibCST/pull/610 +- change pyo3 as optional dependency in native Python Parser by @sehz in https://github.com/Instagram/LibCST/pull/598 +- add slots to base classes, @add_slots takes bases into account by @ariebovenberg in https://github.com/Instagram/LibCST/pull/605 +- [native] Box most enums by @zsol in https://github.com/Instagram/LibCST/pull/632 +- [native] Return tuples instead of lists in CST nodes by @zsol in https://github.com/Instagram/LibCST/pull/631 ## Fixed -* Allow trailing whitespace without newline at EOF by @zsol in https://github.com/Instagram/LibCST/pull/611 -* Handle ast.parse failures when converting function type comments by @stroxler in https://github.com/Instagram/LibCST/pull/616 -* [native] Don't redundantly nest StarredElement inside another Element by @isidentical in https://github.com/Instagram/LibCST/pull/624 -* [native] Allow unparenthesized tuples inside f-strings by @isidentical in https://github.com/Instagram/LibCST/pull/621 -* Don't require whitespace right after match by @isidentical in https://github.com/Instagram/LibCST/pull/628 -* Proxy both parentheses in some pattern matching nodes by @isidentical in https://github.com/Instagram/LibCST/pull/626 + +- Allow trailing whitespace without newline at EOF by @zsol in https://github.com/Instagram/LibCST/pull/611 +- Handle ast.parse failures when converting function type comments by @stroxler in https://github.com/Instagram/LibCST/pull/616 +- [native] Don't redundantly nest StarredElement inside another Element by @isidentical in https://github.com/Instagram/LibCST/pull/624 +- [native] Allow unparenthesized tuples inside f-strings by @isidentical in https://github.com/Instagram/LibCST/pull/621 +- Don't require whitespace right after match by @isidentical in https://github.com/Instagram/LibCST/pull/628 +- Proxy both parentheses in some pattern matching nodes by @isidentical in https://github.com/Instagram/LibCST/pull/626 # 0.4.0 - 2022-01-12 @@ -80,522 +101,575 @@ Note: the new parser is built as a native extension, so LibCST will ship with bi wheels from now on. 
## Added -* Implement a Python PEG parser in Rust by @zsol in [#566](https://github.com/Instagram/LibCST/pull/566) -* implement PEP-654: except* by @zsol in [#571](https://github.com/Instagram/LibCST/pull/571) -* Implement PEP-634 - Match statement by @zsol in [#568](https://github.com/Instagram/LibCST/pull/568) -* Add instructions to codegen test failures by @stroxler in [#582](https://github.com/Instagram/LibCST/pull/582) -* Support Parenthesized With Statements by @stroxler in [#584](https://github.com/Instagram/LibCST/pull/584) -* Support relative imports in AddImportsVisitor by @martindemello in [#585](https://github.com/Instagram/LibCST/pull/585) -* Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign by @stroxler in [#594](https://github.com/Instagram/LibCST/pull/594) + +- Implement a Python PEG parser in Rust by @zsol in [#566](https://github.com/Instagram/LibCST/pull/566) +- implement PEP-654: except\* by @zsol in [#571](https://github.com/Instagram/LibCST/pull/571) +- Implement PEP-634 - Match statement by @zsol in [#568](https://github.com/Instagram/LibCST/pull/568) +- Add instructions to codegen test failures by @stroxler in [#582](https://github.com/Instagram/LibCST/pull/582) +- Support Parenthesized With Statements by @stroxler in [#584](https://github.com/Instagram/LibCST/pull/584) +- Support relative imports in AddImportsVisitor by @martindemello in [#585](https://github.com/Instagram/LibCST/pull/585) +- Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign by @stroxler in [#594](https://github.com/Instagram/LibCST/pull/594) ## Updated -* Update license headers by @zsol in [#560](https://github.com/Instagram/LibCST/pull/560) -* Use precise signature matching when inserting function type annotations by @martindemello in [#591](https://github.com/Instagram/LibCST/pull/591) + +- Update license headers by @zsol in [#560](https://github.com/Instagram/LibCST/pull/560) +- Use precise signature matching when inserting function type annotations by @martindemello in [#591](https://github.com/Instagram/LibCST/pull/591) # 0.3.23 - 2021-11-23 ## Fixed -- Fix missing string annotation references [#561](https://github.com/Instagram/LibCST/pull/561) + +- Fix missing string annotation references [#561](https://github.com/Instagram/LibCST/pull/561) # 0.3.22 - 2021-11-22 ## Added -- Add --indent-string option to `libcst.tool print` [#525](https://github.com/Instagram/LibCST/pull/525) -- Publish pre-release packages to test.pypi.org [#550](https://github.com/Instagram/LibCST/pull/550) -- Add ImportAssignment class extending Assignment to record assignments for import statements [#554](https://github.com/Instagram/LibCST/pull/554) + +- Add --indent-string option to `libcst.tool print` [#525](https://github.com/Instagram/LibCST/pull/525) +- Publish pre-release packages to test.pypi.org [#550](https://github.com/Instagram/LibCST/pull/550) +- Add ImportAssignment class extending Assignment to record assignments for import statements [#554](https://github.com/Instagram/LibCST/pull/554) ## Fixed -- Various documentation fixes [#527](https://github.com/Instagram/LibCST/pull/527), [#529](https://github.com/Instagram/LibCST/pull/529) -- Do not add imports if we added no type info in ApplyTypeAnnotationVisitor [(commit)](https://github.com/Instagram/LibCST/commit/87625d02b6cb321c9c29ba1c67d81ce954a1a396) -- Support relative imports in ApplyTypeAnnotationVisitor qualifier handling [#538](https://github.com/Instagram/LibCST/pull/538) -- Don't gather metadata if the wrapper already 
contains it [#545](https://github.com/Instagram/LibCST/pull/545) -- Swallow parsing errors in string annotations [#548](https://github.com/Instagram/LibCST/pull/548) -- Stop parsing string annotations when no longer in a typing call [#546](https://github.com/Instagram/LibCST/pull/546) + +- Various documentation fixes [#527](https://github.com/Instagram/LibCST/pull/527), [#529](https://github.com/Instagram/LibCST/pull/529) +- Do not add imports if we added no type info in ApplyTypeAnnotationVisitor [(commit)](https://github.com/Instagram/LibCST/commit/87625d02b6cb321c9c29ba1c67d81ce954a1a396) +- Support relative imports in ApplyTypeAnnotationVisitor qualifier handling [#538](https://github.com/Instagram/LibCST/pull/538) +- Don't gather metadata if the wrapper already contains it [#545](https://github.com/Instagram/LibCST/pull/545) +- Swallow parsing errors in string annotations [#548](https://github.com/Instagram/LibCST/pull/548) +- Stop parsing string annotations when no longer in a typing call [#546](https://github.com/Instagram/LibCST/pull/546) ## Updated -- Move find_qualified_names_for in the Assignment class [#557](https://github.com/Instagram/LibCST/pull/557) + +- Move find_qualified_names_for in the Assignment class [#557](https://github.com/Instagram/LibCST/pull/557) # 0.3.21 - 2021-09-21 ## Fixed -- Fix pyre command for type inference provider [#523](https://github.com/Instagram/LibCST/pull/523) + +- Fix pyre command for type inference provider [#523](https://github.com/Instagram/LibCST/pull/523) ## Updated -- Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same [#508]((https://github.com/Instagram/LibCST/pull/508) -- Rewrite the MatchIfTrue type to be generic on _MatchIfTrueT [#512](https://github.com/Instagram/LibCST/pull/512) -- Add python3.9 to the CI [#506](https://github.com/Instagram/LibCST/pull/506) -- Various CI changes [#471](https://github.com/Instagram/LibCST/pull/471) [#510](https://github.com/Instagram/LibCST/pull/510) [#505](https://github.com/Instagram/LibCST/pull/505) [#515](https://github.com/Instagram/LibCST/pull/515) [#516](https://github.com/Instagram/LibCST/pull/516) + +- Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same [#508]((https://github.com/Instagram/LibCST/pull/508) +- Rewrite the MatchIfTrue type to be generic on \_MatchIfTrueT [#512](https://github.com/Instagram/LibCST/pull/512) +- Add python3.9 to the CI [#506](https://github.com/Instagram/LibCST/pull/506) +- Various CI changes [#471](https://github.com/Instagram/LibCST/pull/471) [#510](https://github.com/Instagram/LibCST/pull/510) [#505](https://github.com/Instagram/LibCST/pull/505) [#515](https://github.com/Instagram/LibCST/pull/515) [#516](https://github.com/Instagram/LibCST/pull/516) # 0.3.20 - 2021-08-09 ## Fixed -- Don't reset subprocess environment to fix codemodding on windows [#495](https://github.com/Instagram/LibCST/pull/495) -- TypeAnnotationsVisitor: don't truncate function return type [#499](https://github.com/Instagram/LibCST/pull/499) -- Docs: Fix typo [#492](https://github.com/Instagram/LibCST/pull/492) + +- Don't reset subprocess environment to fix codemodding on windows [#495](https://github.com/Instagram/LibCST/pull/495) +- TypeAnnotationsVisitor: don't truncate function return type [#499](https://github.com/Instagram/LibCST/pull/499) +- Docs: Fix typo [#492](https://github.com/Instagram/LibCST/pull/492) # 0.3.19 - 2021-05-12 # Updated -- Return more specific QNames for assignments 
[#477](https://github.com/Instagram/LibCST/pull/477) -- Tie accesses from string annotation to the string node [#483](https://github.com/Instagram/LibCST/pull/483) + +- Return more specific QNames for assignments [#477](https://github.com/Instagram/LibCST/pull/477) +- Tie accesses from string annotation to the string node [#483](https://github.com/Instagram/LibCST/pull/483) + ## Fixed -- Fix leaking processes from TypeInferenceProvider [#474](https://github.com/Instagram/LibCST/pull/474) -- Fix TypeInferenceProvider breakage with empty cache [#476](https://github.com/Instagram/LibCST/pull/476) -- Fix formatting for link to QualifiedName class in docs [#480](https://github.com/Instagram/LibCST/pull/480) + +- Fix leaking processes from TypeInferenceProvider [#474](https://github.com/Instagram/LibCST/pull/474) +- Fix TypeInferenceProvider breakage with empty cache [#476](https://github.com/Instagram/LibCST/pull/476) +- Fix formatting for link to QualifiedName class in docs [#480](https://github.com/Instagram/LibCST/pull/480) # 0.3.18 - 2021-03-29 ## Added -- Add FlattenSentinel to support replacing a statement with multiple statements [#455](https://github.com/Instagram/LibCST/pull/455) -- Add BuiltinScope [#469](https://github.com/Instagram/LibCST/pull/469) -- Add FullyQualifiedNameProvider [#465](https://github.com/Instagram/LibCST/pull/465) + +- Add FlattenSentinel to support replacing a statement with multiple statements [#455](https://github.com/Instagram/LibCST/pull/455) +- Add BuiltinScope [#469](https://github.com/Instagram/LibCST/pull/469) +- Add FullyQualifiedNameProvider [#465](https://github.com/Instagram/LibCST/pull/465) ## Updated -- Split QualifiedNameProvider out from libcst.metadata.scope_provider [#464](https://github.com/Instagram/LibCST/pull/464) + +- Split QualifiedNameProvider out from libcst.metadata.scope_provider [#464](https://github.com/Instagram/LibCST/pull/464) ## Fixed -- Exception while parsing escape character in raw f-strings [#462](https://github.com/Instagram/LibCST/issues/462) + +- Exception while parsing escape character in raw f-strings [#462](https://github.com/Instagram/LibCST/issues/462) + # 0.3.17 - 2021-02-08 ## Updated -- Optimization: reduce the number of unused parallel processes [#440](https://github.com/Instagram/LibCST/pull/440) + +- Optimization: reduce the number of unused parallel processes [#440](https://github.com/Instagram/LibCST/pull/440) ## Fixed -- Walrus operator's left hand side now has STORE expression context [#443](https://github.com/Instagram/LibCST/pull/433) -- ApplyTypeAnnotationsVisitor applies parameter annotations even if no return type is declared [#445](https://github.com/Instagram/LibCST/pull/445) -- Work around Windows problem by using dummy pool for `jobs=1` [#436](https://github.com/Instagram/LibCST/pull/436) -- Remove extra unused imports added in other files [#453](https://github.com/Instagram/LibCST/pull/453) + +- Walrus operator's left hand side now has STORE expression context [#443](https://github.com/Instagram/LibCST/pull/433) +- ApplyTypeAnnotationsVisitor applies parameter annotations even if no return type is declared [#445](https://github.com/Instagram/LibCST/pull/445) +- Work around Windows problem by using dummy pool for `jobs=1` [#436](https://github.com/Instagram/LibCST/pull/436) +- Remove extra unused imports added in other files [#453](https://github.com/Instagram/LibCST/pull/453) # 0.3.16 - 2020-12-16 ## Added -- Support PEP-604 style unions in decorator annotations 
[#429](https://github.com/Instagram/LibCST/pull/429) -- Gathering exports in augmented assignment statements [#426](https://github.com/Instagram/LibCST/pull/426) + +- Support PEP-604 style unions in decorator annotations [#429](https://github.com/Instagram/LibCST/pull/429) +- Gathering exports in augmented assignment statements [#426](https://github.com/Instagram/LibCST/pull/426) ## Fixed -- Don't allow out of order accesses in the global scope [#431](https://github.com/Instagram/LibCST/pull/431) -- Handle scope ordering in For statements [#430](https://github.com/Instagram/LibCST/pull/430) -- Fix for not parsing subscripts such as `cast()["from"]` [#428](https://github.com/Instagram/LibCST/pull/428) -- Walrus operator's left hand side now has STORE expression context [#433](https://github.com/Instagram/LibCST/pull/433) + +- Don't allow out of order accesses in the global scope [#431](https://github.com/Instagram/LibCST/pull/431) +- Handle scope ordering in For statements [#430](https://github.com/Instagram/LibCST/pull/430) +- Fix for not parsing subscripts such as `cast()["from"]` [#428](https://github.com/Instagram/LibCST/pull/428) +- Walrus operator's left hand side now has STORE expression context [#433](https://github.com/Instagram/LibCST/pull/433) # 0.3.15 - 2020-12-01 ## Added -- Support Named Unicode Characters and yield in f-strings [#424](https://github.com/Instagram/LibCST/pull/424) + +- Support Named Unicode Characters and yield in f-strings [#424](https://github.com/Instagram/LibCST/pull/424) ## Fixed -- Assignment/access ordering in comprehensions [#423](https://github.com/Instagram/LibCST/pull/423) -- Referencing of remaining objects in cast() [#422](https://github.com/Instagram/LibCST/pull/422) + +- Assignment/access ordering in comprehensions [#423](https://github.com/Instagram/LibCST/pull/423) +- Referencing of remaining objects in cast() [#422](https://github.com/Instagram/LibCST/pull/422) # 0.3.14 - 2020-11-18 ## Fixed -- Fix is_annotation for types used in classdef base and assign value [#406](https://github.com/Instagram/LibCST/pull/406) -- Visit concatenated f-strings during scope analysis [#411](https://github.com/Instagram/LibCST/pull/411) -- Correct handling of walrus operator in function args [#417](https://github.com/Instagram/LibCST/pull/417) -- Allow generator expressions in f-strings [#419](https://github.com/Instagram/LibCST/pull/419) -- Keep track of assignment/access ordering during scope analysis [#413](https://github.com/Instagram/LibCST/pull/413) -- Handle string type references in cast() during scope analysis [#418](https://github.com/Instagram/LibCST/pull/418) + +- Fix is_annotation for types used in classdef base and assign value [#406](https://github.com/Instagram/LibCST/pull/406) +- Visit concatenated f-strings during scope analysis [#411](https://github.com/Instagram/LibCST/pull/411) +- Correct handling of walrus operator in function args [#417](https://github.com/Instagram/LibCST/pull/417) +- Allow generator expressions in f-strings [#419](https://github.com/Instagram/LibCST/pull/419) +- Keep track of assignment/access ordering during scope analysis [#413](https://github.com/Instagram/LibCST/pull/413) +- Handle string type references in cast() during scope analysis [#418](https://github.com/Instagram/LibCST/pull/418) # 0.3.13 - 2020-10-12 ## Fixed -- Use correct type for AugAssign and AnnAssign target [#396](https://github.com/Instagram/LibCST/pull/396) -- Support string annotations for type aliases 
[#401](https://github.com/Instagram/LibCST/pull/401) + +- Use correct type for AugAssign and AnnAssign target [#396](https://github.com/Instagram/LibCST/pull/396) +- Support string annotations for type aliases [#401](https://github.com/Instagram/LibCST/pull/401) # 0.3.12 - 2020-10-01 ## Fixed -- fix RemoveImportsVisitor crash when ImportAlias is inserted without comma [#397](https://github.com/Instagram/LibCST/pull/397) -- Provide STORE for {Class,Function}Def.name in ExpressionContextProvider [#394](https://github.com/Instagram/LibCST/pull/394) + +- fix RemoveImportsVisitor crash when ImportAlias is inserted without comma [#397](https://github.com/Instagram/LibCST/pull/397) +- Provide STORE for {Class,Function}Def.name in ExpressionContextProvider [#394](https://github.com/Instagram/LibCST/pull/394) # 0.3.11 - 2020-09-29 ## Added -- Implement TypeOf matcher [#384](https://github.com/Instagram/LibCST/pull/384) + +- Implement TypeOf matcher [#384](https://github.com/Instagram/LibCST/pull/384) ## Updated -- Update return type of ParentNodeProvider to be CSTNode [#377](https://github.com/Instagram/LibCST/pull/377) -- Add source code links to each class/function [#378](https://github.com/Instagram/LibCST/pull/378) + +- Update return type of ParentNodeProvider to be CSTNode [#377](https://github.com/Instagram/LibCST/pull/377) +- Add source code links to each class/function [#378](https://github.com/Instagram/LibCST/pull/378) ## Fixed -- Removing an import alias with a trailing standalone comment should preserve the comment [#392](https://github.com/Instagram/LibCST/pull/392) + +- Removing an import alias with a trailing standalone comment should preserve the comment [#392](https://github.com/Instagram/LibCST/pull/392) # 0.3.10 - 2020-09-17 ## Added -- Handle string annotations in ScopeProvider [#373](https://github.com/Instagram/LibCST/pull/373) -- Add is_annotation subtype for Access inreferences. [#372](https://github.com/Instagram/LibCST/pull/372) + +- Handle string annotations in ScopeProvider [#373](https://github.com/Instagram/LibCST/pull/373) +- Add is_annotation subtype for Access inreferences. 
[#372](https://github.com/Instagram/LibCST/pull/372) ## Updated -- Call pyre query with noninteractive logging [#371](https://github.com/Instagram/LibCST/pull/371) -- Replace matchers with explicit visitation in gatherers [#366](https://github.com/Instagram/LibCST/pull/366) -- Include missing test data in install [#365](https://github.com/Instagram/LibCST/pull/365) + +- Call pyre query with noninteractive logging [#371](https://github.com/Instagram/LibCST/pull/371) +- Replace matchers with explicit visitation in gatherers [#366](https://github.com/Instagram/LibCST/pull/366) +- Include missing test data in install [#365](https://github.com/Instagram/LibCST/pull/365) ## Fixed -- Spaces around walrus operator are not required [#368](https://github.com/Instagram/LibCST/pull/368) -- SaveMachedNode now matches with trailing empty wildcards [#356](https://github.com/Instagram/LibCST/pull/356) -- Correctly extract wildcard matchers [#355](https://github.com/Instagram/LibCST/pull/355) + +- Spaces around walrus operator are not required [#368](https://github.com/Instagram/LibCST/pull/368) +- SaveMachedNode now matches with trailing empty wildcards [#356](https://github.com/Instagram/LibCST/pull/356) +- Correctly extract wildcard matchers [#355](https://github.com/Instagram/LibCST/pull/355) # 0.3.9 - 2020-09-07 ## Added - - Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/pull/353) - - Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/pull/350) - - Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/pull/349) + +- Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/pull/353) +- Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/pull/350) +- Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/pull/349) ## Fixed - - Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/pull/360) - - Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/pull/362) + +- Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/pull/360) +- Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/pull/362) # 0.3.8 - 2020-07-22 ## Added - - Handle type subscripts when applying annotations. [#335](https://github.com/Instagram/LibCST/pull/335) - - Added FullRepoManager `cache` property [#330](https://github.com/Instagram/LibCST/pull/330) - - Added optional args for tox commands [#327](https://github.com/Instagram/LibCST/pull/327) + +- Handle type subscripts when applying annotations. 
[#335](https://github.com/Instagram/LibCST/pull/335) +- Added FullRepoManager `cache` property [#330](https://github.com/Instagram/LibCST/pull/330) +- Added optional args for tox commands [#327](https://github.com/Instagram/LibCST/pull/327) ## Updated - - Only remove trailing comma if the last alias is removed [#334](https://github.com/Instagram/LibCST/pull/334) + +- Only remove trailing comma if the last alias is removed [#334](https://github.com/Instagram/LibCST/pull/334) ## Fixed - - Fixed inserting imports after module docstring [#343](https://github.com/Instagram/LibCST/pull/343) - - Fixed ParenthesizedWhitespace before params in FuncDef [#342](https://github.com/Instagram/LibCST/pull/342) - - Fixed validation for ImportAlias and Try statements [#340](https://github.com/Instagram/LibCST/pull/340) - - Fixed NotEqual position issue [#325](https://github.com/Instagram/LibCST/pull/325) - - Fixed minor typo in scope_provider.py [#324](https://github.com/Instagram/LibCST/pull/324) + +- Fixed inserting imports after module docstring [#343](https://github.com/Instagram/LibCST/pull/343) +- Fixed ParenthesizedWhitespace before params in FuncDef [#342](https://github.com/Instagram/LibCST/pull/342) +- Fixed validation for ImportAlias and Try statements [#340](https://github.com/Instagram/LibCST/pull/340) +- Fixed NotEqual position issue [#325](https://github.com/Instagram/LibCST/pull/325) +- Fixed minor typo in scope_provider.py [#324](https://github.com/Instagram/LibCST/pull/324) # 0.3.7 - 2020-06-24 ## Added - - Added `RenameCommand` to rename all instances of a local or imported object to a specified new name. [#308](https://github.com/Instagram/LibCST/pull/308) + +- Added `RenameCommand` to rename all instances of a local or imported object to a specified new name. [#308](https://github.com/Instagram/LibCST/pull/308) ## Updated - - Upgraded Codecov dev dependency to 2.1.4. [#311](https://github.com/Instagram/LibCST/pull/311) - - Enabled Pyre `strict` mode by default. [#313](https://github.com/Instagram/LibCST/pull/313) + +- Upgraded Codecov dev dependency to 2.1.4. [#311](https://github.com/Instagram/LibCST/pull/311) +- Enabled Pyre `strict` mode by default. [#313](https://github.com/Instagram/LibCST/pull/313) ## Fixed - - Fixed `ImportError` under Python 3.9. [#306](https://github.com/Instagram/LibCST/pull/306) - - Fixed `stdout` being plugged into successfully codemod-ed files. [#309](https://github.com/Instagram/LibCST/pull/309) - - Fixed `QualifiedName` retrieval for names with repeated substrings. [#312](https://github.com/Instagram/LibCST/pull/312) - - Fixed default values of keyword-only and positional-only arguments in `ApplyTypeAnnotationsVisitor`. [#314](https://github.com/Instagram/LibCST/pull/314) - - Fixed `ExpressionContextProvider` by giving subscript values a `LOAD`context. [#319](https://github.com/Instagram/LibCST/pull/319) + +- Fixed `ImportError` under Python 3.9. [#306](https://github.com/Instagram/LibCST/pull/306) +- Fixed `stdout` being plugged into successfully codemod-ed files. [#309](https://github.com/Instagram/LibCST/pull/309) +- Fixed `QualifiedName` retrieval for names with repeated substrings. [#312](https://github.com/Instagram/LibCST/pull/312) +- Fixed default values of keyword-only and positional-only arguments in `ApplyTypeAnnotationsVisitor`. [#314](https://github.com/Instagram/LibCST/pull/314) +- Fixed `ExpressionContextProvider` by giving subscript values a `LOAD`context. 
[#319](https://github.com/Instagram/LibCST/pull/319) # 0.3.6 - 2020-05-27 ## Added - - Added `ConvertNamedTupleToDataclassCommand` to convert `NamedTuple` class declarations to Python 3.7 `dataclasses` using the `@dataclass(frozen=True)` decorator. [#299](https://github.com/Instagram/LibCST/pull/299) + +- Added `ConvertNamedTupleToDataclassCommand` to convert `NamedTuple` class declarations to Python 3.7 `dataclasses` using the `@dataclass(frozen=True)` decorator. [#299](https://github.com/Instagram/LibCST/pull/299) ## Fixed - - Fixed typo in file name `libcst/codemod/commands/convert_percent_format_to_fstring.py`. [#301](https://github.com/Instagram/LibCST/pull/301) - - Fixed `StopIteration` exception during scope analysis matching on import names. [#302](https://github.com/Instagram/LibCST/pull/302) + +- Fixed typo in file name `libcst/codemod/commands/convert_percent_format_to_fstring.py`. [#301](https://github.com/Instagram/LibCST/pull/301) +- Fixed `StopIteration` exception during scope analysis matching on import names. [#302](https://github.com/Instagram/LibCST/pull/302) # 0.3.5 - 2020-05-12 ## Updated - - Expose more granular `Assignments` and `Accesses` for dotted imports in `ScopeProvider`. [#284](https://github.com/Instagram/LibCST/pull/284) - - `get_qualified_names_for` returns the most appropriate qualified name. [#290](https://github.com/Instagram/LibCST/pull/290) - - Surface `SyntaxError` raised by formatter in codemod run. [#288](https://github.com/Instagram/LibCST/pull/288) [#289](https://github.com/Instagram/LibCST/pull/289) - - Rename `ApplyTypeAnnotationsVisitor.add_stub_to_context` as `ApplyTypeAnnotationsVisitor.store_stub_in_context` and add `overwrite_existing_annotations` to allow overwrite existing type annotations. [#289](https://github.com/Instagram/LibCST/pull/291) + +- Expose more granular `Assignments` and `Accesses` for dotted imports in `ScopeProvider`. [#284](https://github.com/Instagram/LibCST/pull/284) +- `get_qualified_names_for` returns the most appropriate qualified name. [#290](https://github.com/Instagram/LibCST/pull/290) +- Surface `SyntaxError` raised by formatter in codemod run. [#288](https://github.com/Instagram/LibCST/pull/288) [#289](https://github.com/Instagram/LibCST/pull/289) +- Rename `ApplyTypeAnnotationsVisitor.add_stub_to_context` as `ApplyTypeAnnotationsVisitor.store_stub_in_context` and add `overwrite_existing_annotations` to allow overwrite existing type annotations. [#289](https://github.com/Instagram/LibCST/pull/291) ## Fixed - - Close opened file handles on finishing codemod to avoid `Too many open files` on OSX. [#283](https://github.com/Instagram/LibCST/pull/283) + +- Close opened file handles on finishing codemod to avoid `Too many open files` on OSX. [#283](https://github.com/Instagram/LibCST/pull/283) ## Deprecated - - `ApplyTypeAnnotationsVisitor.add_stub_to_context` is renamed as `ApplyTypeAnnotationsVisitor.store_stub_in_context`. + +- `ApplyTypeAnnotationsVisitor.add_stub_to_context` is renamed as `ApplyTypeAnnotationsVisitor.store_stub_in_context`. # 0.3.4 - 2020-03-27 ## Added - - Supported CST parsing for Python 3.0, 3.1 and 3.3. [#261](https://github.com/Instagram/LibCST/pull/261) - - Added `RemoveUnusedImportsCommand` for removing unused import codemod. [#266](https://github.com/Instagram/LibCST/pull/266) - - Added `ApplyTypeAnnotationsVisitor.add_stub_to_context` for apply type annotations from stub modules. [#265](https://github.com/Instagram/LibCST/pull/265) + +- Supported CST parsing for Python 3.0, 3.1 and 3.3. 
[#261](https://github.com/Instagram/LibCST/pull/261) +- Added `RemoveUnusedImportsCommand` for removing unused import codemod. [#266](https://github.com/Instagram/LibCST/pull/266) +- Added `ApplyTypeAnnotationsVisitor.add_stub_to_context` for apply type annotations from stub modules. [#265](https://github.com/Instagram/LibCST/pull/265) ## Updated - - Improved exception message of `get_metadata` when MetadataWrapper is not used. [#257](https://github.com/Instagram/LibCST/pull/257) - - New steps for Pyre type check in README.rst which analyzes installed Python sources for better type checking. [#262](https://github.com/Instagram/LibCST/pull/262) + +- Improved exception message of `get_metadata` when MetadataWrapper is not used. [#257](https://github.com/Instagram/LibCST/pull/257) +- New steps for Pyre type check in README.rst which analyzes installed Python sources for better type checking. [#262](https://github.com/Instagram/LibCST/pull/262) ## Fixed - - Parsed `except(Exception):` correctly while there is no space after except syntax. [#256](https://github.com/Instagram/LibCST/pull/256) - - Fixed `RemoveImportsVisitor` to not remove imports when references still exist. [#264](https://github.com/Instagram/LibCST/pull/264) - - Fixed missing type annotations. [#271](https://github.com/Instagram/LibCST/pull/271) - - `AddImportsVisitor` generates deterministic order for added imports. [#274](https://github.com/Instagram/LibCST/pull/274) + +- Parsed `except(Exception):` correctly while there is no space after except syntax. [#256](https://github.com/Instagram/LibCST/pull/256) +- Fixed `RemoveImportsVisitor` to not remove imports when references still exist. [#264](https://github.com/Instagram/LibCST/pull/264) +- Fixed missing type annotations. [#271](https://github.com/Instagram/LibCST/pull/271) +- `AddImportsVisitor` generates deterministic order for added imports. [#274](https://github.com/Instagram/LibCST/pull/274) # 0.3.3 - 2020-03-05 ## Added - - `ByteSpanPositionProvider` provides start offset and length of CSTNode as metadata. - - `get_docstring` helper provides docstring from `Module`, `ClassDef` and `FunctionDef` node types. + +- `ByteSpanPositionProvider` provides start offset and length of CSTNode as metadata. +- `get_docstring` helper provides docstring from `Module`, `ClassDef` and `FunctionDef` node types. ## Updated - - Optimized `ScopeProvider` performance to run faster and use less memory: - - remove unnecessary `Assignment` of keyword `Arg`. - - don't provide scope object for formatting information nodes. - - batch set union updates in `infer_accesses` step. + +- Optimized `ScopeProvider` performance to run faster and use less memory: + - remove unnecessary `Assignment` of keyword `Arg`. + - don't provide scope object for formatting information nodes. + - batch set union updates in `infer_accesses` step. ## Fixed - - Fixed `_assignments` mutation when calling read-only `Scope.get_qualified_names_for` and `__contains__`. + +- Fixed `_assignments` mutation when calling read-only `Scope.get_qualified_names_for` and `__contains__`. # 0.3.2 - 2020-02-24 ## Added - - Added `RemoveImportsVisitor` to remove an import if it's not used in a module. - - Added `GatherExportsVisitor` to gather exports specified in `__all__`. - - Added property helpers `evaluated_name` and `evaluated_name` in `ImportAlias`. - - Added helper to get full module name: `get_absolute_module_for_import` and `get_absolute_module_for_import_or_raise`. 
- - Added `CodemodContext.full_module_name` for full dotted module name. - - Added format specifiers f-string conversion support to `ConvertFormatStringCommand`. + +- Added `RemoveImportsVisitor` to remove an import if it's not used in a module. +- Added `GatherExportsVisitor` to gather exports specified in `__all__`. +- Added property helpers `evaluated_name` and `evaluated_name` in `ImportAlias`. +- Added helper to get full module name: `get_absolute_module_for_import` and `get_absolute_module_for_import_or_raise`. +- Added `CodemodContext.full_module_name` for full dotted module name. +- Added format specifiers f-string conversion support to `ConvertFormatStringCommand`. ## Updated - - Moved LibCST version to `_version.py` and can print it by `python -m libcst.tool --version`. - - Improved `EnsureImportPresentCommand` with `--alias` option. - - Improved `ConvertFormatStringCommand` with `--allow-strip-comments` and `--allow-await` options. + +- Moved LibCST version to `_version.py` and can print it by `python -m libcst.tool --version`. +- Improved `EnsureImportPresentCommand` with `--alias` option. +- Improved `ConvertFormatStringCommand` with `--allow-strip-comments` and `--allow-await` options. # 0.3.1 - 2020-02-06 ## Added - - Added helpers to get both the raw and evaluated value of a SimpleString. - - Added helpers to get the quoting and prefix of SimpleString and FormattedString. - - Added a helper to get the evaluated value of number types. - - Added templated parsers for statement/expression/module to make constructing updated nodes in transforms easier. - - FullRepoManager is now integrated into codemods, so metadata requiring full repo analysis can now be used. - - Added `get_full_name_for_node_or_raise` helper to remove boilerplate of checking against `None`. +- Added helpers to get both the raw and evaluated value of a SimpleString. +- Added helpers to get the quoting and prefix of SimpleString and FormattedString. +- Added a helper to get the evaluated value of number types. +- Added templated parsers for statement/expression/module to make constructing updated nodes in transforms easier. +- FullRepoManager is now integrated into codemods, so metadata requiring full repo analysis can now be used. +- Added `get_full_name_for_node_or_raise` helper to remove boilerplate of checking against `None`. ## Updated - - Upgraded Pyre dependency to 0.0.41. - - Added additional status to `libcst codemod` command. - - `get_full_name_for_node` now supports decorators. +- Upgraded Pyre dependency to 0.0.41. +- Added additional status to `libcst codemod` command. +- `get_full_name_for_node` now supports decorators. ## Fixed - - Clarified documentation around f-strings, fixed indentation. - - Fixed `libcst list` crashing if a codemod does unsafe work on import. - - Fixed deploy-time dependencies so pyyaml won't have to be manually installed to execute codemods. - - QualifiedNameProvider no longer erroneously claims names inside attributes are built-ins. +- Clarified documentation around f-strings, fixed indentation. +- Fixed `libcst list` crashing if a codemod does unsafe work on import. +- Fixed deploy-time dependencies so pyyaml won't have to be manually installed to execute codemods. +- QualifiedNameProvider no longer erroneously claims names inside attributes are built-ins. # 0.3.0 - 2020-01-16 ## Added - - Added support for parsing and rendering Python 3.8 source code. - - Added more documentation for codemods. - - Added `get_full_name_for_expression` helper method. 
- - Added `has_name` helper to `QualifiedNameProvider`. - - Added a `--python-version` flag to `libcst.tool print` utility. +- Added support for parsing and rendering Python 3.8 source code. +- Added more documentation for codemods. +- Added `get_full_name_for_expression` helper method. +- Added `has_name` helper to `QualifiedNameProvider`. +- Added a `--python-version` flag to `libcst.tool print` utility. ## Updated - - Codemod command can now discover codemods in subdirectories of configured modules. - - Updgraded Pyre dependency to 0.0.39. +- Codemod command can now discover codemods in subdirectories of configured modules. +- Updgraded Pyre dependency to 0.0.39. ## Fixed - - Cleaned up some typos and formatting issues in comments and documentation. - - Cleaned up a few redundant typevars. - - Fixed callable typing in matchers implementation. - - Fixed incorrect base class references in matcher decorator attribute visitors. - - Fixed codemod test assertion failing for some whitespace edge cases. - - Fixed scope analysis to track variable usage on `del` statements. +- Cleaned up some typos and formatting issues in comments and documentation. +- Cleaned up a few redundant typevars. +- Fixed callable typing in matchers implementation. +- Fixed incorrect base class references in matcher decorator attribute visitors. +- Fixed codemod test assertion failing for some whitespace edge cases. +- Fixed scope analysis to track variable usage on `del` statements. ## Deprecated - - Deprecated exporting `ensure_type` from `libcst` in favor of `libcst.helpers`. +- Deprecated exporting `ensure_type` from `libcst` in favor of `libcst.helpers`. ## Removed - - Removed `ExtSlice` and helper code in favor of `SubscriptElement`. - - Removed `default_params` attribute on `Parameters`. - - Removed `SyntacticPositionProvider` and `BasicPositionProvider`. - - Removed `CodePosition` and `CodeRange` exports on `libcst` in favor of `libcst.metadata`. +- Removed `ExtSlice` and helper code in favor of `SubscriptElement`. +- Removed `default_params` attribute on `Parameters`. +- Removed `SyntacticPositionProvider` and `BasicPositionProvider`. +- Removed `CodePosition` and `CodeRange` exports on `libcst` in favor of `libcst.metadata`. # 0.2.7 - 2020-01-07 ## Updated - - Command-line interface now shows rough estimate of time remaining while executing a codemod. - - Add needed import now supports import aliases. +- Command-line interface now shows rough estimate of time remaining while executing a codemod. +- Add needed import now supports import aliases. # 0.2.6 - 2020-01-01 ## Added - - Added Codemod framework for running code transform over a codebase in parallel. - - Codemod for code transform logic. - - CodemodContext for preserving states across transforms. - - CodemodCommand for CLI interface. - - CodemodTest for testing codemod easily. - - yaml codemod config. - - Pre-build commands in codemod/commands/. - - Added TypeInferenceProvider for inferred type info from Pyre. A regression test suite was included. - - Added FullRepoManager for metadata inter-process cache handing. +- Added Codemod framework for running code transform over a codebase in parallel. + - Codemod for code transform logic. + - CodemodContext for preserving states across transforms. + - CodemodCommand for CLI interface. + - CodemodTest for testing codemod easily. + - yaml codemod config. + - Pre-build commands in codemod/commands/. +- Added TypeInferenceProvider for inferred type info from Pyre. A regression test suite was included. 
+- Added FullRepoManager for metadata inter-process cache handing. ## Fixed - - Fixed usage link in README. - - Fixed type annotation for Mypy compatibility. +- Fixed usage link in README. +- Fixed type annotation for Mypy compatibility. ## Updated - - Upgraded Pyre to 0.0.38 +- Upgraded Pyre to 0.0.38 # 0.2.5 - 2019-12-05 ## Added - - Added `extract`, `extractall` and `replace` functions to Matchers API. +- Added `extract`, `extractall` and `replace` functions to Matchers API. ## Fixed - - Fixed length restrictions for `AllOf` and `OneOf` so that they can be used with sequence expansion operators. - - Fixed batchable visitors not calling attribute visit functions. - - Fixed typos in docstrings. - - Fixed matcher type exception not being pickleable. +- Fixed length restrictions for `AllOf` and `OneOf` so that they can be used with sequence expansion operators. +- Fixed batchable visitors not calling attribute visit functions. +- Fixed typos in docstrings. +- Fixed matcher type exception not being pickleable. ## Deprecated - - Deprecated parsing function parameters with defaults into `default_params` attribute. They can be found in the `params` attribute instead. +- Deprecated parsing function parameters with defaults into `default_params` attribute. They can be found in the `params` attribute instead. # 0.2.4 - 2019-11-13 ## Fixed - - Fixed broken types for sequence matchers. +- Fixed broken types for sequence matchers. # 0.2.3 - 2019-11-11 ## Added - - Preliminary support for 3.8 walrus operator. - - CI config and fuzz tests for 3.8. - - Experimental re-entrant codegen API. - - Added `unsafe_skip_copy` optimization to `MetadataWrapper`. - - Matchers API now includes a `findall` function. - - Matchers now have a `MatchMetadataIfTrue` special matcher. +- Preliminary support for 3.8 walrus operator. +- CI config and fuzz tests for 3.8. +- Experimental re-entrant codegen API. +- Added `unsafe_skip_copy` optimization to `MetadataWrapper`. +- Matchers API now includes a `findall` function. +- Matchers now have a `MatchMetadataIfTrue` special matcher. ## Updated - - Updated to latest Black release. - - Better type documentation for generated matchers. +- Updated to latest Black release. +- Better type documentation for generated matchers. ## Fixed - - Clarified matchers documentation in several confusing areas. - - Drastically sped up codegen and tests. - - `QualifiedName` now supports imported attributtes. - - `ExpressionContext` properly marks loop variables as `STORE`. - - Various typos in documentation are fixed. +- Clarified matchers documentation in several confusing areas. +- Drastically sped up codegen and tests. +- `QualifiedName` now supports imported attributtes. +- `ExpressionContext` properly marks loop variables as `STORE`. +- Various typos in documentation are fixed. ## Deprecated - - Deprecated `BasicPositionProvider` and `SyntacticPositionProvider` in favor of `WhitespaceInclusivePositionProvider` and `PositionProvider`. +- Deprecated `BasicPositionProvider` and `SyntacticPositionProvider` in favor of `WhitespaceInclusivePositionProvider` and `PositionProvider`. # 0.2.2 - 2019-10-24 ## Added - - Added `deep_with_changes` helper method on CSTNode. - - Added metadata support to matchers. - - Added ability to get the defining node from a `LocalScope` (`FunctionScope`, `ClassScope` or `ComprehensionScope`). +- Added `deep_with_changes` helper method on CSTNode. +- Added metadata support to matchers. 
+- Added ability to get the defining node from a `LocalScope` (`FunctionScope`, `ClassScope` or `ComprehensionScope`). ## Updated - - Many changes to LibCST documentation including a new best practices page and updated scope tutorial. - - Exported `CodePosition` and `CodeRange` from `libcst.metadata` instead of `libcst`. +- Many changes to LibCST documentation including a new best practices page and updated scope tutorial. +- Exported `CodePosition` and `CodeRange` from `libcst.metadata` instead of `libcst`. ## Fixed - - Disallowed decorating a concrete visit or leave method with `@visit` or `@leave` decorators. - - Renamed position provider classes to be more self-explanatory. - - Fixed trailing newline detection when the last character in a file was from a continuation. - - Fixed `deep_clone` potentially blowing the stack with large LibCST trees. +- Disallowed decorating a concrete visit or leave method with `@visit` or `@leave` decorators. +- Renamed position provider classes to be more self-explanatory. +- Fixed trailing newline detection when the last character in a file was from a continuation. +- Fixed `deep_clone` potentially blowing the stack with large LibCST trees. ## Deprecated - - Deprecated `ExtSlice` in favor of `SubscriptElement`. - - Deprecated parsing `Subscript` slices directly into `Index` or `Slice` nodes. +- Deprecated `ExtSlice` in favor of `SubscriptElement`. +- Deprecated parsing `Subscript` slices directly into `Index` or `Slice` nodes. # 0.2.1 - 2019-10-14 ## Added - - `Scope.assignments` and `Scope.accesses` APIs to access all references in a scope. - - Scope analysis tutorial. +- `Scope.assignments` and `Scope.accesses` APIs to access all references in a scope. +- Scope analysis tutorial. ## Updated - - Supported `` in `Scope.get_qualified_names_for` and `QualifiedName`. - - Enforced identity equality for matchers and immutability of non-dataclass matchers. - - Generalize codegen cleanup steps for all codegen. +- Supported `` in `Scope.get_qualified_names_for` and `QualifiedName`. +- Enforced identity equality for matchers and immutability of non-dataclass matchers. +- Generalize codegen cleanup steps for all codegen. ## Fixed - - Made `BatchableMetadataProvider` typing covariant over its typevar. - - Fixed LICENSE header on generated matcher file. - - Cleanup unused internal noqa and on-call specification. + +- Made `BatchableMetadataProvider` typing covariant over its typevar. +- Fixed LICENSE header on generated matcher file. +- Cleanup unused internal noqa and on-call specification. # 0.2.0 - 2019-10-04 ## Added - - Added matchers which allow comparing LibCST trees against arbitrary patterns. - - Improved tree manipulation with `deep_remove` and `deep_replace` helper methods on CSTNode. - - Added new metadata providers: parent node and qualified name. +- Added matchers which allow comparing LibCST trees against arbitrary patterns. +- Improved tree manipulation with `deep_remove` and `deep_replace` helper methods on CSTNode. +- Added new metadata providers: parent node and qualified name. ## Updated - - Updated Pyre to latest release. - - Updated scope metadata to provide additional helpers. - - Updated preferred method of removing a node from its parent in a visitor. +- Updated Pyre to latest release. +- Updated scope metadata to provide additional helpers. +- Updated preferred method of removing a node from its parent in a visitor. ## Fixed - - Metadata classes and methods are now exported from "libcst.metadata" instead of several submodules. 
- - Fixed LICENSE file to explicitly reference individual files in the repo with different licenses. - - Fixed `deep_clone` to correctly clone leaf nodes. - - Fixed all parse entrypoints to always return a tree with no duplicated leaf nodes. +- Metadata classes and methods are now exported from "libcst.metadata" instead of several submodules. +- Fixed LICENSE file to explicitly reference individual files in the repo with different licenses. +- Fixed `deep_clone` to correctly clone leaf nodes. +- Fixed all parse entrypoints to always return a tree with no duplicated leaf nodes. # 0.1.3 - 2019-09-18 ## Added - - Added preliminary support for parsing Python 3.5 and Python 3.6 source. - - Added scope analysis metadata provider. - - Added mypy type support for built package. +- Added preliminary support for parsing Python 3.5 and Python 3.6 source. +- Added scope analysis metadata provider. +- Added mypy type support for built package. ## Fixed - - Several typos in documentation are fixed. +- Several typos in documentation are fixed. # 0.1.2 - 2019-08-29 ## Added - - Added attribute visitor hooks. - - Added base visit/leave methods which can be subclassed. - - Hypothesis fuzz testing suite, courtesy of Zac Hatfield-Dodds. +- Added attribute visitor hooks. +- Added base visit/leave methods which can be subclassed. +- Hypothesis fuzz testing suite, courtesy of Zac Hatfield-Dodds. ## Fixed - - Metadata documentation is much more complete. - - Fixed several whitespace validation issues caught by Hypothesis. - - Parser syntax errors are now used inside parser. +- Metadata documentation is much more complete. +- Fixed several whitespace validation issues caught by Hypothesis. +- Parser syntax errors are now used inside parser. # 0.1.1 - 2019-08-20 ## Added -- Metadata interface is now exported. +- Metadata interface is now exported. ## Fixed -- Dependencies are now specified with minimum revisions. -- Lots of documentation fixes. +- Dependencies are now specified with minimum revisions. +- Lots of documentation fixes. # 0.1 - 2019-07-23 ## Added - - First public release of LibCST. - - Complete, fully typed syntax tree for Python 3.6. - - Full suite of tests for each defined node type. +- First public release of LibCST. +- Complete, fully typed syntax tree for Python 3.6. +- Full suite of tests for each defined node type. 
From ff5fcf8dfb61280702fb343b57b2a5c1a01f0fe7 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 13 Jun 2022 19:29:56 +0100 Subject: [PATCH 255/632] Update pypi_upload.yml Remove prod repo url override --- .github/workflows/pypi_upload.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index bbc491de..ba7baf2e 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -59,5 +59,4 @@ jobs: with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} - repository_url: https://pypi.org/legacy/ packages_dir: ${{ steps.download.outputs.download-path }} From 153c6d12c0ea838dfc0cd3c6adb46a6e9cf020d6 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 16 Jun 2022 09:47:36 +0100 Subject: [PATCH 256/632] Only skip supported escaped characters in f-strings (#700) --- native/libcst/src/tokenizer/core/mod.rs | 20 ++++++++++++- native/libcst/src/tokenizer/tests.rs | 28 +++++++++++++++++++ native/libcst/tests/fixtures/super_strings.py | 4 +++ 3 files changed, 51 insertions(+), 1 deletion(-) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 9a869555..09b4a004 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -940,7 +940,25 @@ impl<'t> TokState<'t> { // skip escaped char (e.g. \', \", or newline/line continuation) self.text_pos.next(); } - } else { + } else if let Some( + '\n' + | '\\' + | '\'' + | '"' + | 'a' + | 'b' + | 'f' + | 'n' + | 'r' + | 't' + | 'v' + | 'x' + | '0'..='9' + | 'N' + | 'u' + | 'U', + ) = self.text_pos.peek() + { // skip escaped char let next_ch = self.text_pos.next(); // check if this is a \N sequence diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 300e6d29..200d170a 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -617,6 +617,34 @@ fn test_split_fstring() { ); } +#[test] +fn test_fstring_escapes() { + let config = TokConfig { + split_fstring: true, + ..default_config() + }; + assert_eq!( + tokenize_all("f'\\{{\\}}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::FStringString, "\\{{\\}}"), + (TokType::FStringEnd, "'"), + ]) + ); + assert_eq!( + tokenize_all(r#"f"regexp_like(path, '.*\{file_type}$')""#, &config), + Ok(vec![ + (TokType::FStringStart, "f\""), + (TokType::FStringString, "regexp_like(path, '.*\\"), + (TokType::Op, "{"), + (TokType::Name, "file_type"), + (TokType::Op, "}"), + (TokType::FStringString, "$')"), + (TokType::FStringEnd, "\""), + ]) + ); +} + #[test] fn test_operator() { assert_eq!( diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py index 76b1b2dd..d993f5ab 100644 --- a/native/libcst/tests/fixtures/super_strings.py +++ b/native/libcst/tests/fixtures/super_strings.py @@ -26,3 +26,7 @@ _ = f"something {{**not** an expression}} {but(this._is)} {{and this isn't.}} en _(f"ok { expr = !r: aosidjhoi } end") print(f"{self.ERASE_CURRENT_LINE}{self._human_seconds(elapsed_time)} {percent:.{self.pretty_precision}f}% complete, {self.estimate_completion(elapsed_time, finished, left)} estimated for {left} files to go...") + +f'\{{\}}' +f"regexp_like(path, '.*\{file_type}$')" +f"\lfoo" From 4c9728ab12c0fecf1d93e40532705bd5cf6a79b9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 16 Jun 2022 09:47:57 +0100 Subject: [PATCH 257/632] Tokenize escaped quotes in raw f-strings correctly 
(#701) --- native/libcst/src/tokenizer/core/mod.rs | 7 ++++++ native/libcst/src/tokenizer/tests.rs | 30 +++++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 09b4a004..89877f7e 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -971,6 +971,13 @@ impl<'t> TokState<'t> { } } } + (Some('\\'), _) if is_raw_string => { + self.text_pos.next(); + if let Some('"' | '\'') = self.text_pos.peek() { + // these aren't end of string markers, skip them + self.text_pos.next(); + } + } (Some('{'), _) => { if is_in_format_spec { // don't actually consume the {, and generate an OP for it instead diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 200d170a..69deaaf3 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -519,6 +519,36 @@ fn test_string_prefix() { (TokType::String, "''"), ]), ); + + // raw string escapes + assert_eq!( + tokenize_all("r'\\''", &default_config()), + Ok(vec![(TokType::String, "r'\\''")]), + ); + assert_eq!( + tokenize_all(r#"r"\"""#, &default_config()), + Ok(vec![(TokType::String, r#"r"\"""#)]), + ); + let config = TokConfig { + split_fstring: true, + ..default_config() + }; + assert_eq!( + tokenize_all("rf'\\''", &config), + Ok(vec![ + (TokType::FStringStart, "rf'"), + (TokType::FStringString, "\\'"), + (TokType::FStringEnd, "'"), + ]), + ); + assert_eq!( + tokenize_all(r#"rf"\"""#, &config), + Ok(vec![ + (TokType::FStringStart, "rf\""), + (TokType::FStringString, r#"\""#), + (TokType::FStringEnd, "\""), + ]), + ); } #[test] From 84da2836047e185dd905b5520e4c3ece0e1e7052 Mon Sep 17 00:00:00 2001 From: zzl Date: Thu, 16 Jun 2022 04:49:24 -0400 Subject: [PATCH 258/632] Fix code example in metadata documentation. (#703) Co-authored-by: zzl0 --- docs/source/metadata.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index bdda7a21..172b1b51 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -18,10 +18,10 @@ numbers of nodes through the :class:`~libcst.metadata.PositionProvider`: .. code-block:: python class NamePrinter(cst.CSTVisitor): - METADATA_DEPENDENCIES = (cst.PositionProvider,) + METADATA_DEPENDENCIES = (cst.metadata.PositionProvider,) def visit_Name(self, node: cst.Name) -> None: - pos = self.get_metadata(cst.PositionProvider, node).start + pos = self.get_metadata(cst.metadata.PositionProvider, node).start print(f"{node.value} found at line {pos.line}, column {pos.column}") wrapper = cst.metadata.MetadataWrapper(cst.parse_module("x = 1")) From 6f28c799bb1ce4b0a00ac67b760b6a7ca29f786f Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Thu, 16 Jun 2022 12:45:02 +0100 Subject: [PATCH 259/632] Fix slow perf in 0.4.2+ (#698) --- libcst/metadata/scope_provider.py | 20 ++++++------ libcst/metadata/tests/test_scope_provider.py | 34 +++++++++----------- 2 files changed, 26 insertions(+), 28 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index f4db3e89..12ffc920 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -406,16 +406,18 @@ class Scope(abc.ABC): #: Refers to the GlobalScope. 
globals: "GlobalScope" _assignments: MutableMapping[str, Set[BaseAssignment]] - _accesses: MutableMapping[str, Set[Access]] _assignment_count: int + _accesses_by_name: MutableMapping[str, Set[Access]] + _accesses_by_node: MutableMapping[cst.CSTNode, Set[Access]] def __init__(self, parent: "Scope") -> None: super().__init__() self.parent = parent self.globals = parent.globals self._assignments = defaultdict(set) - self._accesses = defaultdict(set) self._assignment_count = 0 + self._accesses_by_name = defaultdict(set) + self._accesses_by_node = defaultdict(set) def record_assignment(self, name: str, node: cst.CSTNode) -> None: target = self._find_assignment_target(name) @@ -446,7 +448,8 @@ class Scope(abc.ABC): return self def record_access(self, name: str, access: Access) -> None: - self._accesses[name].add(access) + self._accesses_by_name[name].add(access) + self._accesses_by_node[access.node].add(access) def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: """Overridden by ClassScope to hide it's assignments from child scopes.""" @@ -545,12 +548,9 @@ class Scope(abc.ABC): """ # if this node is an access we know the assignment and we can use that name - node_accesses = { - access - for all_accesses in self._accesses.values() - for access in all_accesses - if access.node == node - } + node_accesses = ( + self._accesses_by_node.get(node) if isinstance(node, cst.CSTNode) else None + ) if node_accesses: return { qname @@ -589,7 +589,7 @@ class Scope(abc.ABC): @property def accesses(self) -> Accesses: """Return an :class:`~libcst.metadata.Accesses` contains all accesses in current scope.""" - return Accesses(self._accesses) + return Accesses(self._accesses_by_name) class BuiltinScope(Scope): diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 0a541146..267a5901 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1445,35 +1445,33 @@ class ScopeProviderTest(UnitTest): def test_global_contains_is_read_only(self) -> None: gscope = GlobalScope() - before_assignments = list(gscope._assignments.items()) - before_accesses = list(gscope._accesses.items()) + before_assignments = list(gscope.assignments) + before_accesses = list(gscope.accesses) self.assertFalse("doesnt_exist" in gscope) - self.assertEqual(list(gscope._accesses.items()), before_accesses) - self.assertEqual(list(gscope._assignments.items()), before_assignments) + self.assertEqual(list(gscope.accesses), before_accesses) + self.assertEqual(list(gscope.assignments), before_assignments) def test_contains_is_read_only(self) -> None: for s in [LocalScope, FunctionScope, ClassScope, ComprehensionScope]: with self.subTest(scope=s): gscope = GlobalScope() scope = s(parent=gscope, node=cst.Name("lol")) - before_assignments = list(scope._assignments.items()) - before_accesses = list(scope._accesses.items()) + before_assignments = list(scope.assignments) + before_accesses = list(scope.accesses) before_overwrites = list(scope._scope_overwrites.items()) - before_parent_assignments = list(scope.parent._assignments.items()) - before_parent_accesses = list(scope.parent._accesses.items()) + before_parent_assignments = list(scope.parent.assignments) + before_parent_accesses = list(scope.parent.accesses) self.assertFalse("doesnt_exist" in scope) - self.assertEqual(list(scope._accesses.items()), before_accesses) - self.assertEqual(list(scope._assignments.items()), before_assignments) + 
self.assertEqual(list(scope.accesses), before_accesses) + self.assertEqual(list(scope.assignments), before_assignments) self.assertEqual( list(scope._scope_overwrites.items()), before_overwrites ) self.assertEqual( - list(scope.parent._assignments.items()), before_parent_assignments - ) - self.assertEqual( - list(scope.parent._accesses.items()), before_parent_accesses + list(scope.parent.assignments), before_parent_assignments ) + self.assertEqual(list(scope.parent.accesses), before_parent_accesses) def test_attribute_of_function_call(self) -> None: get_scope_metadata_provider("foo().bar") @@ -1496,11 +1494,11 @@ class ScopeProviderTest(UnitTest): ) a = m.body[0] scope = scopes[a] - assignments_len_before = len(scope._assignments) - accesses_len_before = len(scope._accesses) + assignments_before = list(scope.assignments) + accesses_before = list(scope.accesses) scope.get_qualified_names_for("doesnt_exist") - self.assertEqual(len(scope._assignments), assignments_len_before) - self.assertEqual(len(scope._accesses), accesses_len_before) + self.assertEqual(list(scope.assignments), assignments_before) + self.assertEqual(list(scope.accesses), accesses_before) def test_gen_dotted_names(self) -> None: names = {name for name, node in _gen_dotted_names(cst.Name(value="a"))} From 7ca1bd1cd5d2eb37f4456d08de161db142274098 Mon Sep 17 00:00:00 2001 From: zzl Date: Fri, 17 Jun 2022 08:05:05 -0400 Subject: [PATCH 260/632] expression: fix SimpleString's quote method (#704) * expression: fix SimpleString's quote method * Add missing copyright header Co-authored-by: zzl0 Co-authored-by: Luke Petre --- libcst/_nodes/expression.py | 12 ++++++--- libcst/_nodes/tests/test_simple_string.py | 31 +++++++++++++++++++++++ 2 files changed, 40 insertions(+), 3 deletions(-) create mode 100644 libcst/_nodes/tests/test_simple_string.py diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 89a9d806..1a90a557 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -656,14 +656,20 @@ class SimpleString(_BasePrefixedString): if len(quote) == 2: # Let's assume this is an empty string. quote = quote[:1] - elif len(quote) == 6: - # Let's assume this is an empty triple-quoted string. + elif 3 < len(quote) <= 6: + # Let's assume this can be one of the following: + # >>> """"foo""" + # '"foo' + # >>> """""bar""" + # '""bar' + # >>> """""" + # '' quote = quote[:3] if len(quote) not in {1, 3}: # We shouldn't get here due to construction validation logic, # but handle the case anyway. - raise Exception("Invalid string {self.value}") + raise Exception(f"Invalid string {self.value}") # pyre-ignore We know via the above validation that we will only # ever return one of the four string literals. diff --git a/libcst/_nodes/tests/test_simple_string.py b/libcst/_nodes/tests/test_simple_string.py new file mode 100644 index 00000000..d9abec50 --- /dev/null +++ b/libcst/_nodes/tests/test_simple_string.py @@ -0,0 +1,31 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import unittest + +import libcst as cst + + +class TestSimpleString(unittest.TestCase): + def test_quote(self) -> None: + test_cases = [ + ('"a"', '"'), + ("'b'", "'"), + ('""', '"'), + ("''", "'"), + ('"""c"""', '"""'), + ("'''d'''", "'''"), + ('""""e"""', '"""'), + ("''''f'''", "'''"), + ('"""""g"""', '"""'), + ("'''''h'''", "'''"), + ('""""""', '"""'), + ("''''''", "'''"), + ] + + for s, expected_quote in test_cases: + simple_string = cst.SimpleString(s) + actual = simple_string.quote + self.assertEqual(expected_quote, actual) From ea8d3d55a552a2d4bb66f29797fea9a0674760fc Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Fri, 17 Jun 2022 13:14:18 +0100 Subject: [PATCH 261/632] Update changelog for 0.4.5 (#707) --- CHANGELOG.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 87779ccd..967565d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ +# 0.4.5 - 2022-06-17 + +## New Contributors + +- @zzl0 made their first contribution in https://github.com/Instagram/LibCST/pull/704 + +## Fixes + +- Only skip supported escaped characters in f-strings by @zsol in https://github.com/Instagram/LibCST/pull/700 +- Escaping quote characters in raw string literals causes a tokenizer error by @zsol in https://github.com/Instagram/LibCST/issues/668 +- Corrected a code example in the documentation by @zzl0 in https://github.com/Instagram/LibCST/pull/703 +- Handle multiline strings that start with quotes by @zzl0 in https://github.com/Instagram/LibCST/pull/704 +- Fixed a performance regression in libcst.metadata.ScopeProvider by @lpetre in https://github.com/Instagram/LibCST/pull/698 + # 0.4.4 - 2022-06-13 ## New Contributors From 306a5f8175cf3b7997f8afe8199d8811e9c8f648 Mon Sep 17 00:00:00 2001 From: Sergei Lebedev <185856+superbobry@users.noreply.github.com> Date: Mon, 20 Jun 2022 14:39:12 +0100 Subject: [PATCH 262/632] convert_type_comments now preserves comments following type comments (#702) For example, y = 5 # type: int # foo is converted to y: int = 5 # foo --- libcst/codemod/commands/convert_type_comments.py | 16 ++++++++++++++++ .../commands/tests/test_convert_type_comments.py | 9 +++++++++ 2 files changed, 25 insertions(+) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index bd7d902f..808edec6 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -126,6 +126,19 @@ def _is_type_comment(comment: Optional[cst.Comment]) -> bool: return True +def _strip_type_comment(comment: Optional[cst.Comment]) -> Optional[cst.Comment]: + """ + Remove the type comment while keeping any following comments. 
+ """ + if not _is_type_comment(comment): + return comment + assert comment is not None + idx = comment.value.find("#", 1) + if idx < 0: + return None + return comment.with_changes(value=comment.value[idx:]) + + class _FailedToApplyAnnotation: pass @@ -504,6 +517,9 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): self, node: cst.TrailingWhitespace, ) -> cst.TrailingWhitespace: + trailing_comment = _strip_type_comment(node.comment) + if trailing_comment is not None: + return node.with_changes(comment=trailing_comment) return node.with_changes( whitespace=cst.SimpleWhitespace( "" diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 98eaa767..0e350238 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -28,6 +28,15 @@ class TestConvertTypeCommentsBase(CodemodTest): class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): + def test_preserves_trailing_comment(self) -> None: + before = """ + y = 5 # type: int # foo + """ + after = """ + y: int = 5 # foo + """ + self.assertCodemod39Plus(before, after) + def test_convert_assignments(self) -> None: before = """ y = 5 # type: int From 42164f8672fcd0a7cfdb92950e74e64be4f069ea Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 21 Jun 2022 10:11:02 +0100 Subject: [PATCH 263/632] Cache the scope name prefix to prevent scope traversal in a tight loop (#708) * Cache the scope name prefix to prevent scope traversal in a tight loop * Adding pyre-fixme. this attribute iclearly has a type in the base class. * Clarify why we do join(filter(None,... --- libcst/metadata/scope_provider.py | 43 +++++++++++++++++-------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 12ffc920..ed0702f5 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -217,24 +217,12 @@ class Assignment(BaseAssignment): return self.__index def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: - scope = self.scope - name_prefixes = [] - while scope: - if isinstance(scope, ClassScope): - name_prefixes.append(scope.name) - elif isinstance(scope, FunctionScope): - name_prefixes.append(f"{scope.name}.") - elif isinstance(scope, ComprehensionScope): - name_prefixes.append("") - elif not isinstance(scope, (GlobalScope, BuiltinScope)): - raise Exception(f"Unexpected Scope: {scope}") - - scope = scope.parent if scope.parent != scope else None - - parts = [*reversed(name_prefixes)] - if full_name: - parts.append(full_name) - return {QualifiedName(".".join(parts), QualifiedNameSource.LOCAL)} + return { + QualifiedName( + self.scope._maybe_dotted_name(full_name), + QualifiedNameSource.LOCAL, + ) + } # even though we don't override the constructor. 
@@ -409,6 +397,7 @@ class Scope(abc.ABC): _assignment_count: int _accesses_by_name: MutableMapping[str, Set[Access]] _accesses_by_node: MutableMapping[cst.CSTNode, Set[Access]] + _name_prefix: str def __init__(self, parent: "Scope") -> None: super().__init__() @@ -418,6 +407,7 @@ class Scope(abc.ABC): self._assignment_count = 0 self._accesses_by_name = defaultdict(set) self._accesses_by_node = defaultdict(set) + self._name_prefix = "" def record_assignment(self, name: str, node: cst.CSTNode) -> None: target = self._find_assignment_target(name) @@ -591,6 +581,11 @@ class Scope(abc.ABC): """Return an :class:`~libcst.metadata.Accesses` contains all accesses in current scope.""" return Accesses(self._accesses_by_name) + # makes a dot separated name but filters out empty strings + def _maybe_dotted_name(self, *args: Optional[str]) -> str: + # filter(None, ...) removes all falsey values (ie empty string) + return ".".join(filter(None, [self._name_prefix, *args])) + class BuiltinScope(Scope): """ @@ -667,6 +662,8 @@ class LocalScope(Scope, abc.ABC): self.name = name self.node = node self._scope_overwrites = {} + # pyre-fixme[4]: Attribute `_name_prefix` of class `LocalScope` has type `str` but no type is specified. + self._name_prefix = self._make_name_prefix() def record_global_overwrite(self, name: str) -> None: self._scope_overwrites[name] = self.globals @@ -695,6 +692,9 @@ class LocalScope(Scope, abc.ABC): else: return self.parent._getitem_from_self_or_parent(name) + def _make_name_prefix(self) -> str: + return self.parent._maybe_dotted_name(self.name, "") + # even though we don't override the constructor. class FunctionScope(LocalScope): @@ -741,6 +741,9 @@ class ClassScope(LocalScope): """ return self.parent._contains_in_self_or_parent(name) + def _make_name_prefix(self) -> str: + return self.parent._maybe_dotted_name(self.name) + # even though we don't override the constructor. class ComprehensionScope(LocalScope): @@ -755,7 +758,9 @@ class ComprehensionScope(LocalScope): # TODO: Assignment expressions (Python 3.8) will complicate ComprehensionScopes, # and will require us to handle such assignments as non-local. # https://www.python.org/dev/peps/pep-0572/#scope-of-the-target - pass + + def _make_name_prefix(self) -> str: + return self.parent._maybe_dotted_name("") # Generates dotted names from an Attribute or Name node: From 779163701cf581edfe4f466e80f567929054a325 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 21 Jun 2022 14:26:45 +0100 Subject: [PATCH 264/632] Faster qualified name format (#710) --- libcst/metadata/scope_provider.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index ed0702f5..ccc6cebd 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -219,7 +219,9 @@ class Assignment(BaseAssignment): def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: return { QualifiedName( - self.scope._maybe_dotted_name(full_name), + f"{self.scope._name_prefix}.{full_name}" + if self.scope._name_prefix + else full_name, QualifiedNameSource.LOCAL, ) } @@ -581,11 +583,6 @@ class Scope(abc.ABC): """Return an :class:`~libcst.metadata.Accesses` contains all accesses in current scope.""" return Accesses(self._accesses_by_name) - # makes a dot separated name but filters out empty strings - def _maybe_dotted_name(self, *args: Optional[str]) -> str: - # filter(None, ...) 
removes all falsey values (ie empty string) - return ".".join(filter(None, [self._name_prefix, *args])) - class BuiltinScope(Scope): """ @@ -693,7 +690,8 @@ class LocalScope(Scope, abc.ABC): return self.parent._getitem_from_self_or_parent(name) def _make_name_prefix(self) -> str: - return self.parent._maybe_dotted_name(self.name, "") + # filter falsey strings out + return ".".join(filter(None, [self.parent._name_prefix, self.name, ""])) # even though we don't override the constructor. @@ -742,7 +740,8 @@ class ClassScope(LocalScope): return self.parent._contains_in_self_or_parent(name) def _make_name_prefix(self) -> str: - return self.parent._maybe_dotted_name(self.name) + # filter falsey strings out + return ".".join(filter(None, [self.parent._name_prefix, self.name])) # even though we don't override the constructor. @@ -760,7 +759,8 @@ class ComprehensionScope(LocalScope): # https://www.python.org/dev/peps/pep-0572/#scope-of-the-target def _make_name_prefix(self) -> str: - return self.parent._maybe_dotted_name("") + # filter falsey strings out + return ".".join(filter(None, [self.parent._name_prefix, ""])) # Generates dotted names from an Attribute or Name node: From aa4a2790dbed53fef41617ec89494f4fe84231be Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 21 Jun 2022 21:35:05 +0100 Subject: [PATCH 265/632] Remove unnecessary qname work (#709) --- libcst/metadata/scope_provider.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index ccc6cebd..26cfcb90 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -538,7 +538,6 @@ class Scope(abc.ABC): considering it could be a complex type annotation in the string which is hard to resolve, e.g. ``List[Union[int, str]]``. 
""" - # if this node is an access we know the assignment and we can use that name node_accesses = ( self._accesses_by_node.get(node) if isinstance(node, cst.CSTNode) else None @@ -551,26 +550,28 @@ class Scope(abc.ABC): for qname in referent.get_qualified_names_for(referent.name) } - results = set() full_name = get_full_name_for_node(node) if full_name is None: - return results + return set() + assignments = set() - parts = full_name.split(".") - for i in range(len(parts), 0, -1): - prefix = ".".join(parts[:i]) + prefix = full_name + while prefix: if prefix in self: assignments = self[prefix] break + prefix = prefix[: prefix.rfind(".")] + + if not isinstance(node, str): + for assignment in assignments: + if isinstance(assignment, Assignment) and _is_assignment( + node, assignment.node + ): + return assignment.get_qualified_names_for(full_name) + + results = set() for assignment in assignments: - names = assignment.get_qualified_names_for(full_name) - if ( - isinstance(assignment, Assignment) - and not isinstance(node, str) - and _is_assignment(node, assignment.node) - ): - return names - results |= names + results |= assignment.get_qualified_names_for(full_name) return results @property From 5592f2e00fa73aea3fe2614185d4673150173534 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 26 Jun 2022 09:41:49 +0100 Subject: [PATCH 266/632] Fix parsing of parenthesized empty tuples (#712) * Don't drop rpars from empty tuples during inflate --- native/libcst/src/nodes/expression.rs | 7 +------ native/libcst/tests/fixtures/tuple_shenanigans.py | 2 ++ 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 15abe99b..ab6b9003 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -1007,12 +1007,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedTuple<'r, 'a> { .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - let rpar = if !elements.is_empty() { - // rpar only has whitespace if elements is non empty - self.rpar.inflate(config)? - } else { - vec![Default::default()] - }; + let rpar = self.rpar.inflate(config)?; Ok(Self::Inflated { elements, lpar, diff --git a/native/libcst/tests/fixtures/tuple_shenanigans.py b/native/libcst/tests/fixtures/tuple_shenanigans.py index f31c6452..136d79d2 100644 --- a/native/libcst/tests/fixtures/tuple_shenanigans.py +++ b/native/libcst/tests/fixtures/tuple_shenanigans.py @@ -4,6 +4,8 @@ # alright here we go. () +(()) +(((())), ()) ( # evil >:) # evil >:( ) # ... From 992511739196b48bc014a2bbeb29150653ad1700 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 26 Jun 2022 09:42:37 +0100 Subject: [PATCH 267/632] Support whitespace after ParamSlash (#713) * add whitespace_after field to ParamSlash * codegen --- libcst/_nodes/expression.py | 15 ++++++-- libcst/_nodes/tests/test_funcdef.py | 34 ++++++++++++++++++- libcst/_nodes/tests/test_lambda.py | 16 +++++++++ libcst/_typed_visitor.py | 8 +++++ libcst/matchers/__init__.py | 6 ++++ native/libcst/src/nodes/expression.rs | 11 +++++- native/libcst/src/parser/grammar.rs | 32 ++++++++--------- .../tests/fixtures/fun_with_func_defs.py | 11 ++++++ 8 files changed, 113 insertions(+), 20 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 1a90a557..2df1e3bc 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -1732,15 +1732,26 @@ class ParamSlash(CSTNode): .. 
_PEP 570: https://www.python.org/dev/peps/pep-0570/#specification """ - # Optional comma that comes after the slash. + #: Optional comma that comes after the slash. This comma doesn't own the whitespace + #: between ``/`` and ``,``. comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + #: Whitespace after the ``/`` character. This is captured here in case there is a + #: comma. + whitespace_after: BaseParenthesizableWhitespace = SimpleWhitespace.field("") + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ParamSlash": - return ParamSlash(comma=visit_sentinel(self, "comma", self.comma, visitor)) + return ParamSlash( + comma=visit_sentinel(self, "comma", self.comma, visitor), + whitespace_after=visit_required( + self, "whitespace_after", self.whitespace_after, visitor + ), + ) def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: state.add_token("/") + self.whitespace_after._codegen(state) comma = self.comma if comma is MaybeSentinel.DEFAULT and default_comma: state.add_token(", ") diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 7dedb6b8..4711aac5 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -1874,6 +1874,36 @@ class FunctionDefParserTest(CSTNodeTest): ), "code": "def foo(bar, baz, /): pass\n", }, + # Positional only params with whitespace after but no comma + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters( + posonly_params=( + cst.Param( + cst.Name("bar"), + star="", + comma=cst.Comma( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.Param( + cst.Name("baz"), + star="", + comma=cst.Comma( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + ), + posonly_ind=cst.ParamSlash( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.SimpleStatementSuite((cst.Pass(),)), + ), + "code": "def foo(bar, baz, / ): pass\n", + "native_only": True, + }, # Typed positional only params { "node": cst.FunctionDef( @@ -2089,7 +2119,9 @@ class FunctionDefParserTest(CSTNodeTest): }, ) ) - def test_valid_38(self, node: cst.CSTNode, code: str) -> None: + def test_valid_38(self, node: cst.CSTNode, code: str, **kwargs: Any) -> None: + if not is_native() and kwargs.get("native_only", False): + self.skipTest("disabled for pure python parser") self.validate_node(node, code, _parse_statement_force_38) @data_provider( diff --git a/libcst/_nodes/tests/test_lambda.py b/libcst/_nodes/tests/test_lambda.py index 82c1b675..f956ee03 100644 --- a/libcst/_nodes/tests/test_lambda.py +++ b/libcst/_nodes/tests/test_lambda.py @@ -30,6 +30,22 @@ class LambdaCreationTest(CSTNodeTest): ), "code": "lambda bar, baz, /: 5", }, + # Test basic positional only params with extra trailing whitespace + { + "node": cst.Lambda( + cst.Parameters( + posonly_params=( + cst.Param(cst.Name("bar")), + cst.Param(cst.Name("baz")), + ), + posonly_ind=cst.ParamSlash( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.Integer("5"), + ), + "code": "lambda bar, baz, / : 5", + }, # Test basic positional params ( cst.Lambda( diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 8eae0d37..f68cb2a8 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -4307,6 +4307,14 @@ class CSTTypedBaseFunctions: def leave_ParamSlash_comma(self, node: "ParamSlash") -> None: pass + @mark_no_op + def visit_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") 
-> None: + pass + @mark_no_op def visit_ParamStar(self, node: "ParamStar") -> Optional[bool]: pass diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index be59a4c9..f28de9c7 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -11955,6 +11955,12 @@ class ParamSlash(BaseMatcherNode): comma: Union[ CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index ab6b9003..39ca64b1 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -141,11 +141,15 @@ impl<'a> Codegen<'a> for Parameters<'a> { #[cst_node] pub struct ParamSlash<'a> { pub comma: Option>, + pub whitespace_after: ParenthesizableWhitespace<'a>, + + pub(crate) tok: TokenRef<'a>, } impl<'a> ParamSlash<'a> { fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { state.add_token("/"); + self.whitespace_after.codegen(state); match (&self.comma, default_comma) { (Some(comma), _) => comma.codegen(state), (None, true) => state.add_token(", "), @@ -157,8 +161,13 @@ impl<'a> ParamSlash<'a> { impl<'r, 'a> Inflate<'a> for DeflatedParamSlash<'r, 'a> { type Inflated = ParamSlash<'a>; fn inflate(self, config: &Config<'a>) -> Result { + let whitespace_after = + parse_parenthesizable_whitespace(config, &mut self.tok.whitespace_after.borrow_mut())?; let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { comma }) + Ok(Self::Inflated { + comma, + whitespace_after, + }) } } diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 94ee24a9..aa7619cb 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -367,19 +367,19 @@ parser! { } rule slash_no_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:param_no_default()+ slash:lit("/") com:comma() { - (a, ParamSlash { comma: Some(com)}) + = a:param_no_default()+ tok:lit("/") com:comma() { + (a, ParamSlash { comma: Some(com), tok }) } - / a:param_no_default()+ slash:lit("/") &lit(")") { - (a, ParamSlash { comma: None }) + / a:param_no_default()+ tok:lit("/") &lit(")") { + (a, ParamSlash { comma: None, tok }) } rule slash_with_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:param_no_default()* b:param_with_default()+ slash:lit("/") c:comma() { - (concat(a, b), ParamSlash { comma: Some(c) }) + = a:param_no_default()* b:param_with_default()+ tok:lit("/") c:comma() { + (concat(a, b), ParamSlash { comma: Some(c), tok }) } - / a:param_no_default()* b:param_with_default()+ slash:lit("/") &lit(")") { - (concat(a, b), ParamSlash { comma: None }) + / a:param_no_default()* b:param_with_default()+ tok:lit("/") &lit(")") { + (concat(a, b), ParamSlash { comma: None, tok }) } rule star_etc() -> StarEtc<'input, 'a> @@ -1056,19 +1056,19 @@ parser! 
{ } rule lambda_slash_no_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:lambda_param_no_default()+ slash:lit("/") com:comma() { - (a, ParamSlash { comma: Some(com) } ) + = a:lambda_param_no_default()+ tok:lit("/") com:comma() { + (a, ParamSlash { comma: Some(com), tok } ) } - / a:lambda_param_no_default()+ slash:lit("/") &lit(":") { - (a, ParamSlash { comma: None }) + / a:lambda_param_no_default()+ tok:lit("/") &lit(":") { + (a, ParamSlash { comma: None, tok }) } rule lambda_slash_with_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") c:comma(){ - (concat(a, b), ParamSlash { comma: Some(c) }) + = a:lambda_param_no_default()* b:lambda_param_with_default()+ tok:lit("/") c:comma(){ + (concat(a, b), ParamSlash { comma: Some(c), tok }) } - / a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") &lit(":") { - (concat(a, b), ParamSlash { comma: None }) + / a:lambda_param_no_default()* b:lambda_param_with_default()+ tok:lit("/") &lit(":") { + (concat(a, b), ParamSlash { comma: None, tok }) } rule lambda_star_etc() -> StarEtc<'input, 'a> diff --git a/native/libcst/tests/fixtures/fun_with_func_defs.py b/native/libcst/tests/fixtures/fun_with_func_defs.py index 1a78d687..9e048a75 100644 --- a/native/libcst/tests/fixtures/fun_with_func_defs.py +++ b/native/libcst/tests/fixtures/fun_with_func_defs.py @@ -1,4 +1,6 @@ def f(a, /,): pass +def f(a, / ,): pass +def f(a, / ): pass def f(a, /, c, d, e): pass def f(a, /, c, *, d, e): pass def f(a, /, c, *, d, e, **kwargs): pass @@ -24,6 +26,10 @@ def f(a, /, c, d, e): def f(a, /, c, *, d, e): pass +def foo(a, * + , bar): + pass + def f( a, @@ -43,6 +49,11 @@ def f( ): pass +def say_hello( + self, user: str, / +): + print('Hello ' + user) + def f(a=1, /, b=2, c=4): pass From c894160d4a7cc9cd28b931e36c36f18727930765 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 26 Jun 2022 09:50:40 +0100 Subject: [PATCH 268/632] bump rust dependencies (#714) --- native/Cargo.lock | 343 +++++++++++++---------------- native/libcst/Cargo.toml | 2 +- native/libcst/src/parser/errors.rs | 2 +- 3 files changed, 159 insertions(+), 188 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index f5c747b3..62fb1351 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -30,21 +30,21 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "bitflags" -version = "1.2.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bstr" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90682c8d613ad3373e66de8c6411e0ae2ab2571e879d2efbf73558cc66f21279" +checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" dependencies = [ "lazy_static", "memchr", @@ -54,9 +54,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.7.0" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631" +checksum = 
"37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" [[package]] name = "cast" @@ -84,9 +84,9 @@ dependencies = [ [[package]] name = "clap" -version = "2.33.3" +version = "2.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "bitflags", "textwrap", @@ -95,16 +95,16 @@ dependencies = [ [[package]] name = "criterion" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab327ed7354547cc2ef43cbe20ef68b988e70b4b593cbd66a2a61733123a3d23" +checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10" dependencies = [ "atty", "cast", "clap", "criterion-plot", "csv", - "itertools 0.10.1", + "itertools", "lazy_static", "num-traits", "oorandom", @@ -130,19 +130,19 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e022feadec601fba1649cfa83586381a4ad31c6bf3a9ab7d408118b05dd9889d" +checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" dependencies = [ "cast", - "itertools 0.9.0", + "itertools", ] [[package]] name = "crossbeam-channel" -version = "0.5.1" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" +checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c" dependencies = [ "cfg-if", "crossbeam-utils", @@ -161,25 +161,26 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.5" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" +checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" dependencies = [ + "autocfg", "cfg-if", "crossbeam-utils", - "lazy_static", "memoffset", + "once_cell", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" dependencies = [ "cfg-if", - "lazy_static", + "once_cell", ] [[package]] @@ -190,7 +191,7 @@ checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" dependencies = [ "bstr", "csv-core", - "itoa", + "itoa 0.4.8", "ryu", "serde", ] @@ -224,9 +225,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "half" -version = "1.7.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "hermit-abi" @@ -239,65 +240,45 @@ dependencies = [ [[package]] name = "indoc" -version = "0.3.6" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" -dependencies = [ - "indoc-impl", - "proc-macro-hack", -] - -[[package]] -name = "indoc-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" -dependencies = [ - "proc-macro-hack", - "proc-macro2", - "quote", - "syn", - "unindent", -] +checksum = "05a0bd019339e5d968b37855180087b7b9d512c5046fbd244cf8c95687927d6e" [[package]] name = "instant" -version = "0.1.10" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ "cfg-if", ] [[package]] name = "itertools" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" dependencies = [ "either", ] [[package]] name = "itoa" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" [[package]] name = "js-sys" -version = "0.3.51" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83bdfbace3a0e81a4253f73b49e960b053e396a11012cbd49b9b74d6a2b67062" +checksum = "c3fac17f7123a73ca62df411b1bf727ccc805daa070338fda671c86dac1bdc27" dependencies = [ "wasm-bindgen", ] @@ -310,9 +291,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.98" +version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790" +checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "libcst" @@ -322,10 +303,10 @@ dependencies = [ "criterion", "criterion-cycles-per-byte", "difference", - "itertools 0.10.1", + "itertools", "libcst_derive", "once_cell", - "paste 1.0.5", + "paste", "peg", "pyo3", "regex", @@ -343,51 +324,52 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.4" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0382880606dff6d15c9476c416d18690b72742aa7b605bb6dd6ec9030fbf07eb" +checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" dependencies = [ + "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.14" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if", ] [[package]] name = "memchr" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = 
"memoffset" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" dependencies = [ "autocfg", ] [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] [[package]] name = "num_cpus" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" dependencies = [ "hermit-abi", "libc", @@ -395,9 +377,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.8.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" +checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" [[package]] name = "oorandom" @@ -407,9 +389,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "parking_lot" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", @@ -418,9 +400,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" dependencies = [ "cfg-if", "instant", @@ -432,28 +414,9 @@ dependencies = [ [[package]] name = "paste" -version = "0.1.18" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" -dependencies = [ - "paste-impl", - "proc-macro-hack", -] - -[[package]] -name = "paste" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" - -[[package]] -name = "paste-impl" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" -dependencies = [ - "proc-macro-hack", -] +checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" [[package]] name = "peg" @@ -510,52 +473,58 @@ dependencies = [ "plotters-backend", ] -[[package]] -name = "proc-macro-hack" -version = "0.5.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" - [[package]] name = "proc-macro2" -version = "1.0.28" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612" +checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" dependencies = 
[ - "unicode-xid", + "unicode-ident", ] [[package]] name = "pyo3" -version = "0.14.5" +version = "0.16.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35100f9347670a566a67aa623369293703322bb9db77d99d7df7313b575ae0c8" +checksum = "1e6302e85060011447471887705bb7838f14aba43fcb06957d823739a496b3dc" dependencies = [ "cfg-if", "indoc", "libc", "parking_lot", - "paste 0.1.18", "pyo3-build-config", + "pyo3-ffi", "pyo3-macros", "unindent", ] [[package]] name = "pyo3-build-config" -version = "0.14.5" +version = "0.16.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d12961738cacbd7f91b7c43bc25cfeeaa2698ad07a04b3be0aa88b950865738f" +checksum = "b5b65b546c35d8a3b1b2f0ddbac7c6a569d759f357f2b9df884f5d6b719152c8" dependencies = [ "once_cell", + "target-lexicon", +] + +[[package]] +name = "pyo3-ffi" +version = "0.16.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c275a07127c1aca33031a563e384ffdd485aee34ef131116fcd58e3430d1742b" +dependencies = [ + "libc", + "pyo3-build-config", ] [[package]] name = "pyo3-macros" -version = "0.14.5" +version = "0.16.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0bc5215d704824dfddddc03f93cb572e1155c68b6761c37005e1c288808ea8" +checksum = "284fc4485bfbcc9850a6d661d627783f18d19c2ab55880b021671c4ba83e90f7" dependencies = [ + "proc-macro2", "pyo3-macros-backend", "quote", "syn", @@ -563,30 +532,29 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.14.5" +version = "0.16.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71623fc593224afaab918aa3afcaf86ed2f43d34f6afde7f3922608f253240df" +checksum = "53bda0f58f73f5c5429693c96ed57f7abdb38fdfc28ae06da4101a257adb7faf" dependencies = [ "proc-macro2", - "pyo3-build-config", "quote", "syn", ] [[package]] name = "quote" -version = "1.0.9" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" dependencies = [ "proc-macro2", ] [[package]] name = "rayon" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" +checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" dependencies = [ "autocfg", "crossbeam-deque", @@ -596,31 +564,30 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" +checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "lazy_static", "num_cpus", ] [[package]] name = "redox_syscall" -version = "0.2.9" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee" +checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" dependencies = [ "bitflags", ] [[package]] name = "regex" -version = "1.5.5" +version = "1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" +checksum = 
"d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" dependencies = [ "aho-corasick", "memchr", @@ -635,9 +602,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" [[package]] name = "rustc_version" @@ -650,9 +617,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.5" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" [[package]] name = "same-file" @@ -671,24 +638,21 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "semver" -version = "1.0.3" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f3aac57ee7f3272d8395c6e4f502f434f0e289fcd62876f70daa008c20dcabe" +checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c" [[package]] name = "serde" -version = "1.0.126" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03" -dependencies = [ - "serde_derive", -] +checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" [[package]] name = "serde_cbor" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e18acfa2f90e8b735b2836ab8d538de304cbb6729a7360729ea5a895d15a622" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" dependencies = [ "half", "serde", @@ -696,9 +660,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.126" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "963a7dbc9895aeac7ac90e74f34a5d5261828f79df35cbed41e10189d3804d43" +checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" dependencies = [ "proc-macro2", "quote", @@ -707,32 +671,38 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.64" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79" +checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" dependencies = [ - "itoa", + "itoa 1.0.2", "ryu", "serde", ] [[package]] name = "smallvec" -version = "1.6.1" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" +checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" [[package]] name = "syn" -version = "1.0.74" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c" +checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" dependencies = [ "proc-macro2", "quote", - "unicode-xid", + "unicode-ident", ] +[[package]] +name = "target-lexicon" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1" + [[package]] name = "termcolor" version = "1.1.3" @@ -753,18 +723,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.26" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93119e4feac1cbe6c798c34d3a53ea0026b0b1de6a120deef895137c0529bfe2" +checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.26" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745" +checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2", "quote", @@ -792,35 +762,36 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.53" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d664de8ea7e531ad4c0f5a834f20b8cb2b8e6dfe88d05796ee7887518ed67b9" +checksum = "764b9e244b482a9b81bde596aa37aa6f1347bf8007adab25e59f901b32b4e0a0" dependencies = [ "glob", - "lazy_static", + "once_cell", "serde", + "serde_derive", "serde_json", "termcolor", "toml", ] [[package]] -name = "unicode-width" -version = "0.1.8" +name = "unicode-ident" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" +checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" [[package]] -name = "unicode-xid" -version = "0.2.2" +name = "unicode-width" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "unindent" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" +checksum = "52fee519a3e570f7df377a06a1a7775cdbfb7aa460be7e08de2b1f0e69973a44" [[package]] name = "walkdir" @@ -835,9 +806,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.74" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54ee1d4ed486f78874278e63e4069fc1ab9f6a18ca492076ffb90c5eb2997fd" +checksum = "7c53b543413a17a202f4be280a7e5c62a1c69345f5de525ee64f8cfdbc954994" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -845,9 +816,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.74" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b33f6a0694ccfea53d94db8b2ed1c3a8a4c86dd936b13b9f0a15ec4a451b900" +checksum = "5491a68ab4500fa6b4d726bd67408630c3dbe9c4fe7bda16d5c82a1fd8c7340a" dependencies = [ "bumpalo", "lazy_static", @@ -860,9 +831,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.74" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "088169ca61430fe1e58b8096c24975251700e7b1f6fd91cc9d59b04fb9b18bd4" +checksum = "c441e177922bc58f1e12c022624b6216378e5febc2f0533e41ba443d505b80aa" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -870,9 +841,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.74" +version = "0.2.81" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "be2241542ff3d9f241f5e2cb6dd09b37efe786df8851c54957683a49f0987a97" +checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" dependencies = [ "proc-macro2", "quote", @@ -883,15 +854,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.74" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7cff876b8f18eed75a66cf49b65e7f967cb354a7aa16003fb55dbfd25b44b4f" +checksum = "6a89911bd99e5f3659ec4acf9c4d93b0a90fe4a2a11f15328472058edc5261be" [[package]] name = "web-sys" -version = "0.3.51" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e828417b379f3df7111d3a2a9e5753706cae29c41f7c4029ee9fd77f3e09e582" +checksum = "2fed94beee57daf8dd7d51f2b15dc2bcde92d7a72304cdf662a4371008b71b90" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index b70f1e8e..e3bcad5b 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -30,7 +30,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.4" -pyo3 = { version = "0.14.4", optional = true } +pyo3 = { version = "0.16.5", optional = true } thiserror = "1.0.23" peg = "0.8.0" chic = "1.2.2" diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index cc58bfa2..79132ce1 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -67,7 +67,7 @@ mod py_error { ] .into_py_dict(py); let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); - PyErr::from_instance( + PyErr::from_value( libcst .getattr("ParserSyntaxError") .expect("ParserSyntaxError not found") From 343f56f6071c2d99ea634702c82b714b275e1469 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 4 Jul 2022 14:45:42 +0100 Subject: [PATCH 269/632] [parser] bail on deeply nested expressions (#718) --- libcst/_nodes/tests/test_binary_op.py | 16 ++++++++++++++++ native/libcst/Cargo.toml | 2 +- native/libcst/src/parser/grammar.rs | 26 ++++++++++++++++++-------- 3 files changed, 35 insertions(+), 9 deletions(-) diff --git a/libcst/_nodes/tests/test_binary_op.py b/libcst/_nodes/tests/test_binary_op.py index b6ac8b09..b86af9fe 100644 --- a/libcst/_nodes/tests/test_binary_op.py +++ b/libcst/_nodes/tests/test_binary_op.py @@ -8,6 +8,7 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -174,3 +175,18 @@ class BinaryOperationTest(CSTNodeTest): ) def test_invalid(self, **kwargs: Any) -> None: self.assert_invalid(**kwargs) + + @data_provider( + ( + { + "code": '"a"' * 6000, + "parser": parse_expression, + }, + { + "code": "[_" + " for _ in _" * 6000 + "]", + "parser": parse_expression, + }, + ) + ) + def test_parse_error(self, **kwargs: Any) -> None: + self.assert_parses(**kwargs, expect_success=not is_native()) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index e3bcad5b..4b57fc12 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -30,7 +30,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.4" -pyo3 = { version = "0.16.5", optional = true } +pyo3 = { version = "0.16", optional = true } thiserror = "1.0.23" peg = "0.8.0" chic = "1.2.2" diff --git a/native/libcst/src/parser/grammar.rs 
b/native/libcst/src/parser/grammar.rs index aa7619cb..decf983b 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -85,6 +85,8 @@ impl<'input, 'a: 'input> ParseElem<'input> for TokVec<'a> { } } +const MAX_RECURSION_DEPTH: usize = 3000; + parser! { pub grammar python<'a>(input: &'a str) for TokVec<'a> { @@ -1117,7 +1119,7 @@ parser! { rule strings() -> String<'input, 'a> = s:(str:tok(STRING, "STRING") t:&_ {(make_string(str), t)} - / str:fstring() t:&_ {(String::Formatted(str), t)})+ { + / str:fstring() t:&_ {(String::Formatted(str), t)})+ {? make_strings(s) } @@ -1171,7 +1173,7 @@ parser! { // Comprehensions & generators rule for_if_clauses() -> CompFor<'input, 'a> - = c:for_if_clause()+ { merge_comp_fors(c) } + = c:for_if_clause()+ {? merge_comp_fors(c) } rule for_if_clause() -> CompFor<'input, 'a> = asy:_async() f:lit("for") tgt:star_targets() i:lit("in") @@ -2240,14 +2242,19 @@ fn make_bare_genexp<'input, 'a>( } } -fn merge_comp_fors<'input, 'a>(comp_fors: Vec>) -> CompFor<'input, 'a> { +fn merge_comp_fors<'input, 'a>( + comp_fors: Vec>, +) -> GrammarResult> { + if comp_fors.len() > MAX_RECURSION_DEPTH { + return Err("shallower comprehension"); + } let mut it = comp_fors.into_iter().rev(); let first = it.next().expect("cant merge empty comp_fors"); - it.fold(first, |acc, curr| CompFor { + Ok(it.fold(first, |acc, curr| CompFor { inner_for_in: Some(Box::new(acc)), ..curr - }) + })) } fn make_left_bracket<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftSquareBracket<'input, 'a> { @@ -2816,10 +2823,13 @@ fn make_string<'input, 'a>(tok: TokenRef<'input, 'a>) -> String<'input, 'a> { fn make_strings<'input, 'a>( s: Vec<(String<'input, 'a>, TokenRef<'input, 'a>)>, -) -> String<'input, 'a> { +) -> GrammarResult> { + if s.len() > MAX_RECURSION_DEPTH { + return Err("shorter concatenated string"); + } let mut strings = s.into_iter().rev(); let (first, _) = strings.next().expect("no strings to make a string of"); - strings.fold(first, |acc, (str, tok)| { + Ok(strings.fold(first, |acc, (str, tok)| { let ret: String<'input, 'a> = String::Concatenated(ConcatenatedString { left: Box::new(str), right: Box::new(acc), @@ -2828,7 +2838,7 @@ fn make_strings<'input, 'a>( right_tok: tok, }); ret - }) + })) } fn make_fstring_expression<'input, 'a>( From 7042623acee4d7e0d36ede5d4580af967af27241 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 4 Jul 2022 14:49:43 +0100 Subject: [PATCH 270/632] bump version to 0.4.6 --- CHANGELOG.md | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 967565d3..7a63da2e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,25 @@ +# 0.4.6 - 2022-07-04 + +## New Contributors +- @superbobry made their first contribution in https://github.com/Instagram/LibCST/pull/702 + +## Fixed +- convert_type_comments now preserves comments following type comments by @superbobry in https://github.com/Instagram/LibCST/pull/702 +- QualifiedNameProvider optimizations + - Cache the scope name prefix to prevent scope traversal in a tight loop by @lpetre in https://github.com/Instagram/LibCST/pull/708 + - Faster qualified name formatting by @lpetre in https://github.com/Instagram/LibCST/pull/710 + - Prevent unnecessary work in Scope.get_qualified_names_for_ by @lpetre in https://github.com/Instagram/LibCST/pull/709 +- Fix parsing of parenthesized empty tuples by @zsol in https://github.com/Instagram/LibCST/pull/712 +- Support whitespace after ParamSlash by @zsol in 
https://github.com/Instagram/LibCST/pull/713 +- [parser] bail on deeply nested expressions by @zsol in https://github.com/Instagram/LibCST/pull/718 + # 0.4.5 - 2022-06-17 ## New Contributors - @zzl0 made their first contribution in https://github.com/Instagram/LibCST/pull/704 -## Fixes +## Fixed - Only skip supported escaped characters in f-strings by @zsol in https://github.com/Instagram/LibCST/pull/700 - Escaping quote characters in raw string literals causes a tokenizer error by @zsol in https://github.com/Instagram/LibCST/issues/668 From b3eda508d40b82306fe136be185a8c2d796f1315 Mon Sep 17 00:00:00 2001 From: Luke Petre Date: Tue, 5 Jul 2022 11:25:39 +0100 Subject: [PATCH 271/632] Fixing prefix matching bug from 0.4.6 (#719) --- libcst/metadata/scope_provider.py | 3 ++- libcst/metadata/tests/test_scope_provider.py | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 26cfcb90..88de3fc4 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -560,7 +560,8 @@ class Scope(abc.ABC): if prefix in self: assignments = self[prefix] break - prefix = prefix[: prefix.rfind(".")] + idx = prefix.rfind(".") + prefix = None if idx == -1 else prefix[:idx] if not isinstance(node, str): for assignment in assignments: diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 267a5901..9908cb4c 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -1964,3 +1964,21 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(assignment.references), 1) references = list(assignment.references) self.assertTrue(references[0].is_annotation) + + def test_prefix_match(self) -> None: + """Verify that a name doesn't overmatch on prefix""" + m, scopes = get_scope_metadata_provider( + """ + def something(): + ... 
+ """ + ) + scope = scopes[m] + self.assertEqual( + scope.get_qualified_names_for(cst.Name("something")), + {QualifiedName(name="something", source=QualifiedNameSource.LOCAL)}, + ) + self.assertEqual( + scope.get_qualified_names_for(cst.Name("something_else")), + set(), + ) From 7cb229d175b79cbf392a34ecb58e79686c73a82d Mon Sep 17 00:00:00 2001 From: Chenguang Zhu Date: Sat, 9 Jul 2022 03:34:29 -0400 Subject: [PATCH 272/632] Implement lazy loading mechanism for QualifiedNameProvider (#720) * Implement lazy loading mechanism for expensive metadata providers * Add support for lazy values in metadata matchers * Fix type issues and implement lazy value support in base metadata provider too * Add unit tests for BaseMetadataProvider Co-authored-by: Zsolt Dollenstein --- libcst/_metadata_dependent.py | 33 ++++++++++- libcst/matchers/_matcher_base.py | 7 ++- libcst/metadata/base_provider.py | 29 +++++----- libcst/metadata/name_provider.py | 6 +- libcst/metadata/tests/test_base_provider.py | 61 +++++++++++++++++++++ libcst/metadata/tests/test_name_provider.py | 20 ++++++- 6 files changed, 135 insertions(+), 21 deletions(-) diff --git a/libcst/_metadata_dependent.py b/libcst/_metadata_dependent.py index 6a768270..4faf7472 100644 --- a/libcst/_metadata_dependent.py +++ b/libcst/_metadata_dependent.py @@ -7,14 +7,17 @@ import inspect from abc import ABC from contextlib import contextmanager from typing import ( + Callable, cast, ClassVar, Collection, + Generic, Iterator, Mapping, Type, TYPE_CHECKING, TypeVar, + Union, ) if TYPE_CHECKING: @@ -29,7 +32,28 @@ if TYPE_CHECKING: _T = TypeVar("_T") -_UNDEFINED_DEFAULT = object() + +class _UNDEFINED_DEFAULT: + pass + + +class LazyValue(Generic[_T]): + """ + The class for implementing a lazy metadata loading mechanism that improves the + performance when retriving expensive metadata (e.g., qualified names). Providers + including :class:`~libcst.metadata.QualifiedNameProvider` use this class to load + the metadata of a certain node lazily when calling + :func:`~libcst.MetadataDependent.get_metadata`. 
+ """ + + def __init__(self, callable: Callable[[], _T]) -> None: + self.callable = callable + self.return_value: Union[_T, Type[_UNDEFINED_DEFAULT]] = _UNDEFINED_DEFAULT + + def __call__(self) -> _T: + if self.return_value is _UNDEFINED_DEFAULT: + self.return_value = self.callable() + return cast(_T, self.return_value) class MetadataDependent(ABC): @@ -107,6 +131,9 @@ class MetadataDependent(ABC): ) if default is not _UNDEFINED_DEFAULT: - return cast(_T, self.metadata[key].get(node, default)) + value = self.metadata[key].get(node, default) else: - return cast(_T, self.metadata[key][node]) + value = self.metadata[key][node] + if isinstance(value, LazyValue): + value = value() + return cast(_T, value) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 64670be4..d8f69ec6 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -31,6 +31,7 @@ from typing import ( import libcst import libcst.metadata as meta from libcst import FlattenSentinel, MaybeSentinel, RemovalSentinel +from libcst._metadata_dependent import LazyValue class DoNotCareSentinel(Enum): @@ -1544,7 +1545,11 @@ def _construct_metadata_fetcher_wrapper( if provider not in metadata: metadata[provider] = wrapper.resolve(provider) - return metadata.get(provider, {}).get(node, _METADATA_MISSING_SENTINEL) + node_metadata = metadata.get(provider, {}).get(node, _METADATA_MISSING_SENTINEL) + if isinstance(node_metadata, LazyValue): + node_metadata = node_metadata() + + return node_metadata return _fetch diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index 69af2dce..1c113f57 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -7,7 +7,6 @@ from pathlib import Path from types import MappingProxyType from typing import ( Callable, - cast, Generic, List, Mapping, @@ -16,12 +15,14 @@ from typing import ( Type, TYPE_CHECKING, TypeVar, + Union, ) from libcst._batched_visitor import BatchableCSTVisitor from libcst._metadata_dependent import ( _T as _MetadataT, _UNDEFINED_DEFAULT, + LazyValue, MetadataDependent, ) from libcst._visitors import CSTVisitor @@ -36,6 +37,7 @@ ProviderT = Type["BaseMetadataProvider[object]"] # BaseMetadataProvider[int] would be a subtype of BaseMetadataProvider[object], so the # typevar is covariant. _ProvidedMetadataT = TypeVar("_ProvidedMetadataT", covariant=True) +MaybeLazyMetadataT = Union[LazyValue[_ProvidedMetadataT], _ProvidedMetadataT] # We can't use an ABCMeta here, because of metaclass conflicts @@ -52,16 +54,16 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): # # N.B. This has some typing variance problems. See `set_metadata` for an # explanation. - _computed: MutableMapping["CSTNode", _ProvidedMetadataT] + _computed: MutableMapping["CSTNode", MaybeLazyMetadataT] - #: Implement gen_cache to indicate the matadata provider depends on cache from external + #: Implement gen_cache to indicate the metadata provider depends on cache from external #: system. This function will be called by :class:`~libcst.metadata.FullRepoManager` #: to compute required cache object per file path. gen_cache: Optional[Callable[[Path, List[str], int], Mapping[str, object]]] = None def __init__(self, cache: object = None) -> None: super().__init__() - self._computed = {} + self._computed: MutableMapping["CSTNode", MaybeLazyMetadataT] = {} if self.gen_cache and cache is None: # The metadata provider implementation is responsible to store and use cache. 
raise Exception( @@ -71,7 +73,7 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): def _gen( self, wrapper: "MetadataWrapper" - ) -> Mapping["CSTNode", _ProvidedMetadataT]: + ) -> Mapping["CSTNode", MaybeLazyMetadataT]: """ Resolves and returns metadata mapping for the module in ``wrapper``. @@ -93,11 +95,7 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): """ ... - # pyre-ignore[46]: The covariant `value` isn't type-safe because we write it to - # pyre: `self._computed`, however we assume that only one subclass in the MRO chain - # pyre: will ever call `set_metadata`, so it's okay for our purposes. There's no - # pyre: sane way to redesign this API so that it doesn't have this problem. - def set_metadata(self, node: "CSTNode", value: _ProvidedMetadataT) -> None: + def set_metadata(self, node: "CSTNode", value: MaybeLazyMetadataT) -> None: """ Record a metadata value ``value`` for ``node``. """ @@ -107,7 +105,9 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): self, key: Type["BaseMetadataProvider[_MetadataT]"], node: "CSTNode", - default: _MetadataT = _UNDEFINED_DEFAULT, + default: Union[ + MaybeLazyMetadataT, Type[_UNDEFINED_DEFAULT] + ] = _UNDEFINED_DEFAULT, ) -> _MetadataT: """ The same method as :func:`~libcst.MetadataDependent.get_metadata` except @@ -116,9 +116,12 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): """ if key is type(self): if default is not _UNDEFINED_DEFAULT: - return cast(_MetadataT, self._computed.get(node, default)) + ret = self._computed.get(node, default) else: - return cast(_MetadataT, self._computed[node]) + ret = self._computed[node] + if isinstance(ret, LazyValue): + return ret() + return ret return super().get_metadata(key, node, default) diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 00753504..60d8763e 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -8,7 +8,7 @@ from pathlib import Path from typing import Collection, List, Mapping, Optional, Union import libcst as cst -from libcst._metadata_dependent import MetadataDependent +from libcst._metadata_dependent import LazyValue, MetadataDependent from libcst.helpers.module import calculate_module_and_package, ModuleNameAndPackage from libcst.metadata.base_provider import BatchableMetadataProvider from libcst.metadata.scope_provider import ( @@ -78,7 +78,9 @@ class QualifiedNameVisitor(cst.CSTVisitor): def on_visit(self, node: cst.CSTNode) -> bool: scope = self.provider.get_metadata(ScopeProvider, node, None) if scope: - self.provider.set_metadata(node, scope.get_qualified_names_for(node)) + self.provider.set_metadata( + node, LazyValue(lambda: scope.get_qualified_names_for(node)) + ) else: self.provider.set_metadata(node, set()) super().on_visit(node) diff --git a/libcst/metadata/tests/test_base_provider.py b/libcst/metadata/tests/test_base_provider.py index 0bf4ca51..26ebde70 100644 --- a/libcst/metadata/tests/test_base_provider.py +++ b/libcst/metadata/tests/test_base_provider.py @@ -7,6 +7,7 @@ from typing import cast import libcst as cst from libcst import parse_module +from libcst._metadata_dependent import LazyValue from libcst.metadata import ( BatchableMetadataProvider, MetadataWrapper, @@ -75,3 +76,63 @@ class BaseMetadataProviderTest(UnitTest): self.assertEqual(metadata[SimpleProvider][pass_], 1) self.assertEqual(metadata[SimpleProvider][return_], 2) self.assertEqual(metadata[SimpleProvider][pass_2], 1) + + 
def test_lazy_visitor_provider(self) -> None: + class SimpleLazyProvider(VisitorMetadataProvider[int]): + """ + Sets metadata on every node to a callable that returns 1. + """ + + def on_visit(self, node: cst.CSTNode) -> bool: + self.set_metadata(node, LazyValue(lambda: 1)) + return True + + wrapper = MetadataWrapper(parse_module("pass; return")) + module = wrapper.module + pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] + return_ = cast(cst.SimpleStatementLine, module.body[0]).body[1] + + provider = SimpleLazyProvider() + metadata = provider._gen(wrapper) + + # Check access on provider + self.assertEqual(provider.get_metadata(SimpleLazyProvider, module), 1) + self.assertEqual(provider.get_metadata(SimpleLazyProvider, pass_), 1) + self.assertEqual(provider.get_metadata(SimpleLazyProvider, return_), 1) + + # Check returned mapping + self.assertTrue(isinstance(metadata[module], LazyValue)) + self.assertTrue(isinstance(metadata[pass_], LazyValue)) + self.assertTrue(isinstance(metadata[return_], LazyValue)) + + def testlazy_batchable_provider(self) -> None: + class SimpleLazyProvider(BatchableMetadataProvider[int]): + """ + Sets metadata on every pass node to a callable that returns 1, + and every return node to a callable that returns 2. + """ + + def visit_Pass(self, node: cst.Pass) -> None: + self.set_metadata(node, LazyValue(lambda: 1)) + + def visit_Return(self, node: cst.Return) -> None: + self.set_metadata(node, LazyValue(lambda: 2)) + + wrapper = MetadataWrapper(parse_module("pass; return; pass")) + module = wrapper.module + pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] + return_ = cast(cst.SimpleStatementLine, module.body[0]).body[1] + pass_2 = cast(cst.SimpleStatementLine, module.body[0]).body[2] + + provider = SimpleLazyProvider() + metadata = _gen_batchable(wrapper, [provider]) + + # Check access on provider + self.assertEqual(provider.get_metadata(SimpleLazyProvider, pass_), 1) + self.assertEqual(provider.get_metadata(SimpleLazyProvider, return_), 2) + self.assertEqual(provider.get_metadata(SimpleLazyProvider, pass_2), 1) + + # Check returned mapping + self.assertTrue(isinstance(metadata[SimpleLazyProvider][pass_], LazyValue)) + self.assertTrue(isinstance(metadata[SimpleLazyProvider][return_], LazyValue)) + self.assertTrue(isinstance(metadata[SimpleLazyProvider][pass_2], LazyValue)) diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index 9b0b409f..9f381368 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -10,6 +10,7 @@ from typing import Collection, Dict, Mapping, Optional, Set, Tuple import libcst as cst from libcst import ensure_type +from libcst._nodes.base import CSTNode from libcst.metadata import ( FullyQualifiedNameProvider, MetadataWrapper, @@ -22,11 +23,26 @@ from libcst.metadata.name_provider import FullyQualifiedNameVisitor from libcst.testing.utils import data_provider, UnitTest +class QNameVisitor(cst.CSTVisitor): + + METADATA_DEPENDENCIES = (QualifiedNameProvider,) + + def __init__(self) -> None: + self.qnames: Dict["CSTNode", Collection[QualifiedName]] = {} + + def on_visit(self, node: cst.CSTNode) -> bool: + qname = self.get_metadata(QualifiedNameProvider, node) + self.qnames[node] = qname + return True + + def get_qualified_name_metadata_provider( module_str: str, ) -> Tuple[cst.Module, Mapping[cst.CSTNode, Collection[QualifiedName]]]: wrapper = MetadataWrapper(cst.parse_module(dedent(module_str))) - return wrapper.module, 
wrapper.resolve(QualifiedNameProvider) + visitor = QNameVisitor() + wrapper.visit(visitor) + return wrapper.module, visitor.qnames def get_qualified_names(module_str: str) -> Set[QualifiedName]: @@ -358,7 +374,7 @@ class QualifiedNameProviderTest(UnitTest): else: import f import a.b as f - + f() """ ) From c85f9bf19de2aeb848953e326e1130e40847ca66 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 12 Jul 2022 15:52:10 +0100 Subject: [PATCH 273/632] bump version to 0.4.7 --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a63da2e..3a6dfa5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +# 0.4.7 - 2022-07-12 + +## New Contributors +* @Chenguang-Zhu made their first contribution in https://github.com/Instagram/LibCST/pull/720 + +## Fixed +* Fix get_qualified_names_for matching on prefixes of the given name by @lpetre in https://github.com/Instagram/LibCST/pull/719 + +## Added +* Implement lazy loading mechanism for expensive metadata providers by @Chenguang-Zhu in https://github.com/Instagram/LibCST/pull/720 + # 0.4.6 - 2022-07-04 ## New Contributors From 367b14b0526660981642c943cd0fa28851700eab Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 15 Jul 2022 15:26:58 +0100 Subject: [PATCH 274/632] test using python 3.11 beta versions (#723) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3608bbbc..4f3fed69 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.7, 3.8, 3.9, "3.10"] + python-version: [3.7, 3.8, 3.9, "3.10", "3.11.0-alpha - 3.11"] parser: [pure, native] steps: - uses: actions/checkout@v1 From 47e5ea15e1c92358f3c7da6af11ac46fb80ba78d Mon Sep 17 00:00:00 2001 From: zzl Date: Thu, 4 Aug 2022 06:33:26 -0400 Subject: [PATCH 275/632] Fix parse error message for number parsing (#724) Co-authored-by: zzl0 --- libcst/_parser/conversions/expression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index a3ba90ac..1a46de2a 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -870,7 +870,7 @@ def convert_atom_basic( Imaginary(child.string), child.whitespace_before ) else: - raise Exception("Unparseable number {child.string}") + raise Exception(f"Unparseable number {child.string}") else: raise Exception(f"Logic error, unexpected token {child.type.name}") From 345c7ba89b884154f469cf503d56f3f80b01a62b Mon Sep 17 00:00:00 2001 From: Dhruv Manilawala Date: Mon, 8 Aug 2022 16:06:39 +0530 Subject: [PATCH 276/632] add dependabot config file (#728) --- .github/dependabot.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..eb037e18 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,13 @@ +# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + - package-ecosystem: pip + directory: "/" + schedule: + interval: weekly + + - package-ecosystem: cargo + directory: "/native" + schedule: + interval: weekly From 2e441cb50ffce735fb9e0fcf9a1d215d8890ec0d Mon Sep 17 00:00:00 2001 From: Zsolt 
Dollenstein Date: Mon, 8 Aug 2022 13:44:04 +0200 Subject: [PATCH 277/632] pin flake8 below 5.0 (#739) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 640b8af7..e54aa855 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ black==22.3.0 coverage>=4.5.4 fixit==0.1.1 -flake8>=3.7.8 +flake8>=3.7.8,<5 git+https://github.com/jimmylai/sphinx.git@slots_type_annotation hypothesis>=4.36.0 hypothesmith>=0.0.4 From 7307a6918f444676206847b6bc5640923e5625bf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 13:11:17 +0100 Subject: [PATCH 278/632] Bump black from 22.3.0 to 22.6.0 (#735) Bumps [black](https://github.com/psf/black) from 22.3.0 to 22.6.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.3.0...22.6.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e54aa855..ebbf91ad 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -black==22.3.0 +black==22.6.0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8,<5 From 9b55dba06eeef81d556656b45d23ada0176b274f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 13:13:46 +0100 Subject: [PATCH 279/632] Bump criterion from 0.3.5 to 0.3.6 in /native (#731) Bumps [criterion](https://github.com/bheisler/criterion.rs) from 0.3.5 to 0.3.6. - [Release notes](https://github.com/bheisler/criterion.rs/releases) - [Changelog](https://github.com/bheisler/criterion.rs/blob/master/CHANGELOG.md) - [Commits](https://github.com/bheisler/criterion.rs/compare/0.3.5...0.3.6) --- updated-dependencies: - dependency-name: criterion dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 14 ++++++++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 62fb1351..6ab20972 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -67,6 +67,12 @@ dependencies = [ "rustc_version", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cfg-if" version = "1.0.0" @@ -95,12 +101,12 @@ dependencies = [ [[package]] name = "criterion" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ "atty", - "cast", + "cast 0.3.0", "clap", "criterion-plot", "csv", @@ -134,7 +140,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" dependencies = [ - "cast", + "cast 0.2.7", "itertools", ] diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 4b57fc12..c8bffddc 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -40,7 +40,7 @@ regex = "1.5.5" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] -criterion = { version = "0.3.4", features = ["html_reports"] } +criterion = { version = "0.3.6", features = ["html_reports"] } criterion-cycles-per-byte = "0.1" difference = "2.0.0" From 8c29b395c21d7e949baaa59b5889f19af1cff9b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 13:13:59 +0100 Subject: [PATCH 280/632] Bump trybuild from 1.0.63 to 1.0.64 in /native (#730) Bumps [trybuild](https://github.com/dtolnay/trybuild) from 1.0.63 to 1.0.64. - [Release notes](https://github.com/dtolnay/trybuild/releases) - [Commits](https://github.com/dtolnay/trybuild/compare/1.0.63...1.0.64) --- updated-dependencies: - dependency-name: trybuild dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 6ab20972..d67404b1 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -768,9 +768,9 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "764b9e244b482a9b81bde596aa37aa6f1347bf8007adab25e59f901b32b4e0a0" +checksum = "e7f408301c7480f9e6294eb779cfc907f54bd901a9660ef24d7f233ed5376485" dependencies = [ "glob", "once_cell", From dbfd83d811760e82b7251d72812306a844f7cfaa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 13:14:10 +0100 Subject: [PATCH 281/632] Bump regex from 1.5.6 to 1.6.0 in /native (#729) Bumps [regex](https://github.com/rust-lang/regex) from 1.5.6 to 1.6.0. 
- [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.5.6...1.6.0) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index d67404b1..ae0e5951 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -591,9 +591,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.6" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -608,9 +608,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.26" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "rustc_version" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index c8bffddc..09be8059 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" once_cell = "1.5.2" -regex = "1.5.5" +regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] From 09895298d54ef1b65204141f7af02531b46abf79 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 14:26:47 +0100 Subject: [PATCH 282/632] Bump ufmt from 1.3 to 1.3.3 (#734) Bumps [ufmt](https://github.com/omnilib/ufmt) from 1.3 to 1.3.3. - [Release notes](https://github.com/omnilib/ufmt/releases) - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v1.3.0...v1.3.3) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index ebbf91ad..7bcb5eba 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,7 @@ prompt-toolkit>=2.0.9 pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 -ufmt==1.3 +ufmt==1.3.3 usort==1.0.0rc1 setuptools-rust>=0.12.1 slotscheck>=0.7.1 From bcc169f60cb9f832dcadd5f059d05bf3d5809b17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 14:27:12 +0100 Subject: [PATCH 283/632] Bump syn from 1.0.98 to 1.0.99 in /native (#733) Bumps [syn](https://github.com/dtolnay/syn) from 1.0.98 to 1.0.99. 
- [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/1.0.98...1.0.99) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index ae0e5951..433d96de 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -694,9 +694,9 @@ checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" [[package]] name = "syn" -version = "1.0.98" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" +checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" dependencies = [ "proc-macro2", "quote", From 21550e6e042f5330e9087c69e8801f2fee181786 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 14:27:23 +0100 Subject: [PATCH 284/632] Bump once_cell from 1.12.0 to 1.13.0 in /native (#732) Bumps [once_cell](https://github.com/matklad/once_cell) from 1.12.0 to 1.13.0. - [Release notes](https://github.com/matklad/once_cell/releases) - [Changelog](https://github.com/matklad/once_cell/blob/master/CHANGELOG.md) - [Commits](https://github.com/matklad/once_cell/compare/v1.12.0...v1.13.0) --- updated-dependencies: - dependency-name: once_cell dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 433d96de..9ceba0de 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -383,9 +383,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" [[package]] name = "oorandom" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 09be8059..c98c2ef2 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,7 +35,7 @@ thiserror = "1.0.23" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" -once_cell = "1.5.2" +once_cell = "1.13.0" regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } From 977504f10407b3d94bbe45157f9286709dd74754 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 14:31:07 +0100 Subject: [PATCH 285/632] Update maturin requirement from <0.9,>=0.8.3 to >=0.8.3,<0.14 (#737) Updates the requirements on [maturin](https://github.com/pyo3/maturin) to permit the latest version. - [Release notes](https://github.com/pyo3/maturin/releases) - [Changelog](https://github.com/PyO3/maturin/blob/main/Changelog.md) - [Commits](https://github.com/pyo3/maturin/compare/v0.8.3...v0.13.1) --- updated-dependencies: - dependency-name: maturin dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7bcb5eba..4565c304 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ git+https://github.com/jimmylai/sphinx.git@slots_type_annotation hypothesis>=4.36.0 hypothesmith>=0.0.4 jupyter>=1.0.0 -maturin>=0.8.3,<0.9 +maturin>=0.8.3,<0.14 nbsphinx>=0.4.2 prompt-toolkit>=2.0.9 pyre-check==0.9.9; platform_system != "Windows" From 2bd6a647806b9f4639fadc178eb58d12e10ed07c Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Wed, 17 Aug 2022 20:32:00 +0800 Subject: [PATCH 286/632] Fix graph not appearing on readthedocs (#751) Fix problematic doc build, due to the new builder image provided by readthedocs doesn't has the `graphviz-dev` package pre-installed any more --- .readthedocs.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index bf9cfe15..7889c2c9 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -10,6 +10,8 @@ build: tools: python: "3" rust: "1.55" + apt_packages: + - graphviz python: install: From 73cfc7f7fa1c205da97468f98465197ec55e5b40 Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Wed, 17 Aug 2022 20:42:31 +0800 Subject: [PATCH 287/632] Fix docstring of `FullRepoManager` (#750) The render error originates from how we violate the syntax rules of the `field list` markup element of reStructuredText. The `specification of field list states](https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#field-lists) that a multi-line `field body` must be indented relative to the `field marker`. --- libcst/metadata/full_repo_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index 2a05475e..6a7c1e9a 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -29,8 +29,8 @@ class FullRepoManager: :param paths: a collection of paths to access full repository data. :param providers: a collection of metadata provider classes require accessing full repository data, currently supports - :class:`~libcst.metadata.TypeInferenceProvider` and - :class:`~libcst.metadata.FullyQualifiedNameProvider`. + :class:`~libcst.metadata.TypeInferenceProvider` and + :class:`~libcst.metadata.FullyQualifiedNameProvider`. :param timeout: number of seconds. Raises `TimeoutExpired `_ when timeout. """ From 79cf2518963157246910ba62d339a73fca1ea3f0 Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Wed, 17 Aug 2022 20:55:06 +0800 Subject: [PATCH 288/632] Update Sphinx to 5.1.1 (#748) --- docs/source/conf.py | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 2f20dcab..c210fc1d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -71,7 +71,7 @@ master_doc = "index" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
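A small, hypothetical usage sketch of the FullRepoManager parameters documented in [PATCH 287/632] above (paths, providers, timeout), with FullyQualifiedNameProvider as the provider that needs full-repository data. The repository layout and the file name my_pkg/helpers.py are invented for illustration; only the public libcst.metadata API is assumed.

from libcst.metadata import FullRepoManager, FullyQualifiedNameProvider

# Assumed layout: a repository rooted at "." that contains my_pkg/helpers.py.
manager = FullRepoManager(
    ".",                               # repo_root_dir
    paths=["my_pkg/helpers.py"],       # paths, given relative to the repo root
    providers={FullyQualifiedNameProvider},
    timeout=30,                        # seconds, per the docstring fixed above
)

# The manager computes the per-file cache the provider requires and hands back a
# MetadataWrapper that already carries it.
wrapper = manager.get_metadata_wrapper_for_path("my_pkg/helpers.py")
fully_qualified_names = wrapper.resolve(FullyQualifiedNameProvider)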
diff --git a/requirements-dev.txt b/requirements-dev.txt index 4565c304..06ced278 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ black==22.6.0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8,<5 -git+https://github.com/jimmylai/sphinx.git@slots_type_annotation +Sphinx>=5.1.1 hypothesis>=4.36.0 hypothesmith>=0.0.4 jupyter>=1.0.0 From 1f5f16aa77e8f4e91896a438e679b246c484ea34 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 17 Aug 2022 15:00:48 +0200 Subject: [PATCH 289/632] Skip CI checks on Python 3.11 x Windows (#752) There are no available binary wheels for lxml for Windows & Python 3.11 yet: https://bugs.launchpad.net/lxml/+bug/1977998 Until that's resolved, let's skip tests in this configuration. --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4f3fed69..ea3e393d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,6 +16,10 @@ jobs: os: [ubuntu-latest, macos-latest, windows-latest] python-version: [3.7, 3.8, 3.9, "3.10", "3.11.0-alpha - 3.11"] parser: [pure, native] + exclude: + # skip these builds until https://bugs.launchpad.net/lxml/+bug/1977998 is resolved + - os: windows-latest + python-version: "3.11.0-alpha - 3.11" steps: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 From 9f843cf4e1def94a0124b4dea41cfab256e2928f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 14:03:12 +0100 Subject: [PATCH 290/632] Bump usort from 1.0.0rc1 to 1.0.4 (#746) Bumps [usort](https://github.com/facebookexperimental/usort) from 1.0.0rc1 to 1.0.4. - [Release notes](https://github.com/facebookexperimental/usort/releases) - [Changelog](https://github.com/facebookexperimental/usort/blob/v1.0.4/CHANGELOG.md) - [Commits](https://github.com/facebookexperimental/usort/compare/v1.0.0rc1...v1.0.4) --- updated-dependencies: - dependency-name: usort dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 06ced278..00d0b0ed 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -13,7 +13,7 @@ pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 ufmt==1.3.3 -usort==1.0.0rc1 +usort==1.0.4 setuptools-rust>=0.12.1 slotscheck>=0.7.1 jinja2==3.0.3 From 7f8e755fbe2395123b2b3a3d89da7e1ab0a57c81 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 14:03:38 +0100 Subject: [PATCH 291/632] Bump quote from 1.0.20 to 1.0.21 in /native (#741) Bumps [quote](https://github.com/dtolnay/quote) from 1.0.20 to 1.0.21. - [Release notes](https://github.com/dtolnay/quote/releases) - [Commits](https://github.com/dtolnay/quote/compare/1.0.20...1.0.21) --- updated-dependencies: - dependency-name: quote dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 9ceba0de..be576561 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -549,9 +549,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] From ef2d70e37e9a935b825a78f351a47c093562fcc1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 14:03:51 +0100 Subject: [PATCH 292/632] Bump thiserror from 1.0.31 to 1.0.32 in /native (#742) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.31 to 1.0.32. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.31...1.0.32) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index be576561..44a89286 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -729,18 +729,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.31" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" +checksum = "f5f6586b7f764adc0231f4c79be7b920e766bb2f3e51b3661cdb263828f19994" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.31" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" +checksum = "12bafc5b54507e0149cdf1b145a5d80ab80a90bcd9275df43d4fff68460f6c21" dependencies = [ "proc-macro2", "quote", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index c98c2ef2..dc12609a 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -31,7 +31,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.4" pyo3 = { version = "0.16", optional = true } -thiserror = "1.0.23" +thiserror = "1.0.32" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" From bfd09823ae119974cdfe2273d48d1c04597a8e3d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 14:04:05 +0100 Subject: [PATCH 293/632] Bump paste from 1.0.7 to 1.0.8 in /native (#743) Bumps [paste](https://github.com/dtolnay/paste) from 1.0.7 to 1.0.8. - [Release notes](https://github.com/dtolnay/paste/releases) - [Commits](https://github.com/dtolnay/paste/compare/1.0.7...1.0.8) --- updated-dependencies: - dependency-name: paste dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 44a89286..1b7d86fc 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -420,9 +420,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" +checksum = "9423e2b32f7a043629287a536f21951e8c6a82482d0acb1eeebfc90bc2225b22" [[package]] name = "peg" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index dc12609a..c21701bf 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -29,7 +29,7 @@ py = ["pyo3","pyo3/extension-module"] trace = ["peg/trace"] [dependencies] -paste = "1.0.4" +paste = "1.0.8" pyo3 = { version = "0.16", optional = true } thiserror = "1.0.32" peg = "0.8.0" From fa8ee152fb9e526951a32d682cc05a54c0581ad0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 18:26:10 +0100 Subject: [PATCH 294/632] Bump ufmt from 1.3.3 to 2.0.0 (#745) Bumps [ufmt](https://github.com/omnilib/ufmt) from 1.3.3 to 2.0.0. - [Release notes](https://github.com/omnilib/ufmt/releases) - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v1.3.3...v2.0.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 00d0b0ed..49dc34e5 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,7 @@ prompt-toolkit>=2.0.9 pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 -ufmt==1.3.3 +ufmt==2.0.0 usort==1.0.4 setuptools-rust>=0.12.1 slotscheck>=0.7.1 From 5fc69d6e4e6a8d67f563a38a8da4845b5aff872d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 18:26:31 +0100 Subject: [PATCH 295/632] Bump jinja2 from 3.0.3 to 3.1.2 (#744) Bumps [jinja2](https://github.com/pallets/jinja) from 3.0.3 to 3.1.2. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.0.3...3.1.2) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 49dc34e5..dba55e66 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,4 +16,4 @@ ufmt==2.0.0 usort==1.0.4 setuptools-rust>=0.12.1 slotscheck>=0.7.1 -jinja2==3.0.3 +jinja2==3.1.2 From fc622ce790d53e6ed619901462841c7fc9ae98da Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Sat, 27 Aug 2022 01:54:07 +0800 Subject: [PATCH 296/632] Fix bug when `TypeOf` is one of options in `OneOf` / `AllOf` (#756) * Fix a bug when one of the option of `OneOf` is a `TypeOf` * Disallow `TypeOf` in `AllOf`, analogous to how `OneOf` is disallowed in `AllOf` --- libcst/matchers/_matcher_base.py | 4 +++- libcst/matchers/tests/test_matchers.py | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index d8f69ec6..94366b91 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -226,7 +226,7 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): for option in options: if isinstance(option, AllOf): raise Exception("Cannot use AllOf and OneOf in combination!") - elif isinstance(option, OneOf): + elif isinstance(option, (OneOf, TypeOf)): actual_options.extend(option.options) else: actual_options.append(option) @@ -302,6 +302,8 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): for option in options: if isinstance(option, OneOf): raise Exception("Cannot use AllOf and OneOf in combination!") + elif isinstance(option, TypeOf): + raise Exception("Cannot use AllOf and TypeOf in combination!") elif isinstance(option, AllOf): actual_options.extend(option.options) else: diff --git a/libcst/matchers/tests/test_matchers.py b/libcst/matchers/tests/test_matchers.py index 9d79f640..e41bd866 100644 --- a/libcst/matchers/tests/test_matchers.py +++ b/libcst/matchers/tests/test_matchers.py @@ -291,6 +291,13 @@ class MatchersMatcherTest(UnitTest): self.assertTrue( matches(cst.Name("True"), m.OneOf(m.Name("True"), m.Name("False"))) ) + # Match when one of the option is a TypeOf + self.assertTrue( + matches( + cst.Name("True"), + m.OneOf(m.TypeOf(m.Name, m.NameItem)("True"), m.Name("False")), + ) + ) # Match any assignment that assigns a value of True or False to an # unspecified target. self.assertTrue( From a077104f3928b2f69206c2bb7654f20ac5199f29 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Aug 2022 18:55:30 +0100 Subject: [PATCH 297/632] Bump once_cell from 1.13.0 to 1.13.1 in /native (#754) Bumps [once_cell](https://github.com/matklad/once_cell) from 1.13.0 to 1.13.1. - [Release notes](https://github.com/matklad/once_cell/releases) - [Changelog](https://github.com/matklad/once_cell/blob/master/CHANGELOG.md) - [Commits](https://github.com/matklad/once_cell/compare/v1.13.0...v1.13.1) --- updated-dependencies: - dependency-name: once_cell dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 1b7d86fc..8dd07551 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -383,9 +383,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" +checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e" [[package]] name = "oorandom" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index c21701bf..13c29504 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,7 +35,7 @@ thiserror = "1.0.32" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" -once_cell = "1.13.0" +once_cell = "1.13.1" regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } From ea2490606a36eefed238422be25909cd66d05ab1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Aug 2022 16:44:47 +0100 Subject: [PATCH 298/632] Bump pyo3 from 0.16.5 to 0.17.1 in /native (#759) Bumps [pyo3](https://github.com/pyo3/pyo3) from 0.16.5 to 0.17.1. - [Release notes](https://github.com/pyo3/pyo3/releases) - [Changelog](https://github.com/PyO3/pyo3/blob/main/CHANGELOG.md) - [Commits](https://github.com/pyo3/pyo3/compare/v0.16.5...v0.17.1) --- updated-dependencies: - dependency-name: pyo3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 21 +++++++++++---------- native/libcst/Cargo.toml | 2 +- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 8dd07551..4de7d781 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -490,13 +490,14 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.16.5" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6302e85060011447471887705bb7838f14aba43fcb06957d823739a496b3dc" +checksum = "12f72538a0230791398a0986a6518ebd88abc3fded89007b506ed072acc831e1" dependencies = [ "cfg-if", "indoc", "libc", + "memoffset", "parking_lot", "pyo3-build-config", "pyo3-ffi", @@ -506,9 +507,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.16.5" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b65b546c35d8a3b1b2f0ddbac7c6a569d759f357f2b9df884f5d6b719152c8" +checksum = "fc4cf18c20f4f09995f3554e6bcf9b09bd5e4d6b67c562fdfaafa644526ba479" dependencies = [ "once_cell", "target-lexicon", @@ -516,9 +517,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.16.5" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c275a07127c1aca33031a563e384ffdd485aee34ef131116fcd58e3430d1742b" +checksum = "a41877f28d8ebd600b6aa21a17b40c3b0fc4dfe73a27b6e81ab3d895e401b0e9" dependencies = [ "libc", "pyo3-build-config", @@ -526,9 +527,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.16.5" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"284fc4485bfbcc9850a6d661d627783f18d19c2ab55880b021671c4ba83e90f7" +checksum = "2e81c8d4bcc2f216dc1b665412df35e46d12ee8d3d046b381aad05f1fcf30547" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -538,9 +539,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.16.5" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53bda0f58f73f5c5429693c96ed57f7abdb38fdfc28ae06da4101a257adb7faf" +checksum = "85752a767ee19399a78272cc2ab625cd7d373b2e112b4b13db28de71fa892784" dependencies = [ "proc-macro2", "quote", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 13c29504..af4b5e10 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -30,7 +30,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.8" -pyo3 = { version = "0.16", optional = true } +pyo3 = { version = "0.17", optional = true } thiserror = "1.0.32" peg = "0.8.0" chic = "1.2.2" From 64811b779536b7bbc6f60acc74a99c82abc536a7 Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Mon, 29 Aug 2022 23:46:08 +0800 Subject: [PATCH 299/632] Tighten the metadata type of `ExpressionContextProvider` (#760) --- libcst/metadata/expression_context_provider.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index beec959d..955c14ad 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -201,7 +201,7 @@ class ExpressionContextVisitor(cst.CSTVisitor): return False -class ExpressionContextProvider(BatchableMetadataProvider[Optional[ExpressionContext]]): +class ExpressionContextProvider(BatchableMetadataProvider[ExpressionContext]): """ Provides :class:`ExpressionContext` metadata (mimics the `expr_context `__ in ast) for the @@ -209,9 +209,9 @@ class ExpressionContextProvider(BatchableMetadataProvider[Optional[ExpressionCon :class:`~libcst.Attribute`, :class:`~libcst.Subscript`, :class:`~libcst.StarredElement` , :class:`~libcst.List`, :class:`~libcst.Tuple` and :class:`~libcst.Name`. - Not that a :class:`~libcst.Name` may not always has context because of the differences between + Note that a :class:`~libcst.Name` may not always have context because of the differences between ast and LibCST. E.g. :attr:`~libcst.Attribute.attr` is a :class:`~libcst.Name` in LibCST - but a str in ast. To honor ast implementation, we don't assignment context to + but a str in ast. To honor ast implementation, we don't assign context to :attr:`~libcst.Attribute.attr`. 
From 27aa23f0565b096ee823bea3018a89ac0f72d951 Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Mon, 29 Aug 2022 23:47:02 +0800 Subject: [PATCH 300/632] Raise informative exception when metadata is unresolved in a metadata-based match (#757) * Raise informative exception when metadata is unresolved in a metadata-based match, instead of silently hide potential errors * Fix unit test of `findall` * Add unit test to cover the case of a resolved metadata provider doesn't provide metadata for all nodes * Document the behavior of metadata-based match when the metadata provider is unresolved --- libcst/matchers/_matcher_base.py | 19 +++++++++----- libcst/matchers/tests/test_findall.py | 19 ++++++++------ .../tests/test_matchers_with_metadata.py | 26 +++++++++++++++++-- 3 files changed, 48 insertions(+), 16 deletions(-) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 94366b91..027c535c 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -581,8 +581,11 @@ class MatchMetadata(_BaseMetadataMatcher): """ Matcher that looks up the metadata on the current node using the provided metadata provider and compares the value on the node against the value provided - to :class:`MatchMetadata`. If the metadata value does not exist for a particular - node, :class:`MatchMetadata` will always be considered not a match. + to :class:`MatchMetadata`. + If the metadata provider is unresolved, a :class:`LookupError` exeption will be + raised and ask you to provide a :class:`~libcst.metadata.MetadataWrapper`. + If the metadata value does not exist for a particular node, :class:`MatchMetadata` + will be considered not a match. For example, to match against any function call which has one parameter which is used in a load expression context:: @@ -664,8 +667,10 @@ class MatchMetadataIfTrue(_BaseMetadataMatcher): Matcher that looks up the metadata on the current node using the provided metadata provider and passes it to a callable which can inspect the metadata further, returning ``True`` if the matcher should be considered a match. + If the metadata provider is unresolved, a :class:`LookupError` exeption will be + raised and ask you to provide a :class:`~libcst.metadata.MetadataWrapper`. If the metadata value does not exist for a particular node, - :class:`MatchMetadataIfTrue` will always be considered not a match. + :class:`MatchMetadataIfTrue` will be considered not a match. For example, to match against any arg whose qualified name might be ``typing.Dict``:: @@ -1523,8 +1528,10 @@ def _matches( def _construct_metadata_fetcher_null() -> Callable[ [meta.ProviderT, libcst.CSTNode], object ]: - def _fetch(*args: object, **kwargs: object) -> object: - return _METADATA_MISSING_SENTINEL + def _fetch(provider: meta.ProviderT, node: libcst.CSTNode) -> NoReturn: + raise LookupError( + f"{provider.__name__} is not resolved; did you forget a MetadataWrapper?" 
+ ) return _fetch @@ -1547,7 +1554,7 @@ def _construct_metadata_fetcher_wrapper( if provider not in metadata: metadata[provider] = wrapper.resolve(provider) - node_metadata = metadata.get(provider, {}).get(node, _METADATA_MISSING_SENTINEL) + node_metadata = metadata[provider].get(node, _METADATA_MISSING_SENTINEL) if isinstance(node_metadata, LazyValue): node_metadata = node_metadata() diff --git a/libcst/matchers/tests/test_findall.py b/libcst/matchers/tests/test_findall.py index 77316a1a..6e81e481 100644 --- a/libcst/matchers/tests/test_findall.py +++ b/libcst/matchers/tests/test_findall.py @@ -103,14 +103,17 @@ class MatchersFindAllTest(UnitTest): ], ) - # Test that failing to provide metadata leads to no match - booleans = findall( - wrapper.module, - m.MatchMetadata( - meta.ExpressionContextProvider, meta.ExpressionContext.STORE - ), - ) - self.assertNodeSequenceEqual(booleans, []) + # Test that failing to provide metadata leads to raising an informative exception + with self.assertRaises( + LookupError, + msg="ExpressionContextProvider is not resolved; did you forget a MetadataWrapper?", + ): + booleans = findall( + wrapper.module, + m.MatchMetadata( + meta.ExpressionContextProvider, meta.ExpressionContext.STORE + ), + ) def test_findall_with_visitors(self) -> None: # Find all assignments in a tree diff --git a/libcst/matchers/tests/test_matchers_with_metadata.py b/libcst/matchers/tests/test_matchers_with_metadata.py index e4bdf07e..63530c37 100644 --- a/libcst/matchers/tests/test_matchers_with_metadata.py +++ b/libcst/matchers/tests/test_matchers_with_metadata.py @@ -366,14 +366,18 @@ class MatchersMetadataTest(UnitTest): ) ) - def test_lambda_metadata_matcher_with_no_metadata(self) -> None: + def test_lambda_metadata_matcher_with_unresolved_metadata(self) -> None: # Match on qualified name provider module = cst.parse_module( "from typing import List\n\ndef foo() -> None: pass\n" ) functiondef = cst.ensure_type(module.body[1], cst.FunctionDef) - self.assertFalse( + # Test that when the metadata is unresolved, raise an informative exception. + with self.assertRaises( + LookupError, + msg="QualifiedNameProvider is not resolved; did you forget a MetadataWrapper?", + ): matches( functiondef, m.FunctionDef( @@ -385,6 +389,24 @@ class MatchersMetadataTest(UnitTest): ) ), ) + + def test_lambda_metadata_matcher_with_no_metadata(self) -> None: + class VoidProvider(meta.BatchableMetadataProvider[object]): + """A dummy metadata provider""" + + module = cst.parse_module( + "from typing import List\n\ndef foo() -> None: pass\n" + ) + wrapper = cst.MetadataWrapper(module) + functiondef = cst.ensure_type(wrapper.module.body[1], cst.FunctionDef) + + # Test that when the node has no corresponding metadata, there is no match. + self.assertFalse( + matches( + functiondef, + m.FunctionDef(name=m.MatchMetadataIfTrue(VoidProvider, lambda _: True)), + metadata_resolver=wrapper, + ) ) def test_lambda_metadata_matcher_operators(self) -> None: From f92cbb7976b29c14c66a7a9ff12512b66b0f1f21 Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Mon, 5 Sep 2022 03:36:55 +0800 Subject: [PATCH 301/632] Fix doc build status badge (#763) --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index 710e2223..3b904cb2 100644 --- a/README.rst +++ b/README.rst @@ -9,8 +9,8 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python .. 
|support-ukraine| image:: https://img.shields.io/badge/Support-Ukraine-FFD500?style=flat&labelColor=005BBB :alt: Support Ukraine - Help Provide Humanitarian Aid to Ukraine. :target: https://opensource.fb.com/support-ukraine - -.. |readthedocs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest&style=flat + +.. |readthedocs-badge| image:: https://readthedocs.org/projects/libcst/badge/?version=latest&style=flat :target: https://libcst.readthedocs.io/en/latest/ :alt: Documentation @@ -135,7 +135,7 @@ packaging tools. We recommend installing the latest stable release from For parsing, LibCST ships with a native extension, so releases are distributed as binary wheels as well as the source code. If a binary wheel is not available for your system -(Linux/Windows x86/x64 and Mac x64/arm are covered), you'll need a recent +(Linux/Windows x86/x64 and Mac x64/arm are covered), you'll need a recent `Rust toolchain `_ for installing. Further Reading @@ -190,7 +190,7 @@ this: See the `unittest documentation `_ for more examples of how to run tests. - + Building ~~~~~~~~ From 901e97749ec69458aac3096802ff105a68ab93c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 11:04:12 +0100 Subject: [PATCH 302/632] Bump black from 22.6.0 to 22.8.0 (#767) Bumps [black](https://github.com/psf/black) from 22.6.0 to 22.8.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.6.0...22.8.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index dba55e66..8a949c9e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -black==22.6.0 +black==22.8.0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8,<5 From fe706cada0b06eb332fba8a8e560d796e23a53c0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 11:04:36 +0100 Subject: [PATCH 303/632] Bump paste from 1.0.8 to 1.0.9 in /native (#766) Bumps [paste](https://github.com/dtolnay/paste) from 1.0.8 to 1.0.9. - [Release notes](https://github.com/dtolnay/paste/releases) - [Commits](https://github.com/dtolnay/paste/compare/1.0.8...1.0.9) --- updated-dependencies: - dependency-name: paste dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 4de7d781..ea1b914d 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -420,9 +420,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9423e2b32f7a043629287a536f21951e8c6a82482d0acb1eeebfc90bc2225b22" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "peg" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index af4b5e10..3a1ac6fc 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -29,7 +29,7 @@ py = ["pyo3","pyo3/extension-module"] trace = ["peg/trace"] [dependencies] -paste = "1.0.8" +paste = "1.0.9" pyo3 = { version = "0.17", optional = true } thiserror = "1.0.32" peg = "0.8.0" From c488ccb9dfab84c78ad0fd87611e7d41ebb7f592 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 11:37:55 +0100 Subject: [PATCH 304/632] Bump thiserror from 1.0.32 to 1.0.34 in /native (#764) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.32 to 1.0.34. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.32...1.0.34) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index ea1b914d..422b4de3 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -730,18 +730,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.32" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5f6586b7f764adc0231f4c79be7b920e766bb2f3e51b3661cdb263828f19994" +checksum = "8c1b05ca9d106ba7d2e31a9dab4a64e7be2cce415321966ea3132c49a656e252" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.32" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12bafc5b54507e0149cdf1b145a5d80ab80a90bcd9275df43d4fff68460f6c21" +checksum = "e8f2591983642de85c921015f3f070c665a197ed69e417af436115e3a1407487" dependencies = [ "proc-macro2", "quote", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 3a1ac6fc..ea9d7169 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -31,7 +31,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.9" pyo3 = { version = "0.17", optional = true } -thiserror = "1.0.32" +thiserror = "1.0.34" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" From 986575d18538b6be7ca3a961df8175a07b2baeea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 11:38:51 +0100 Subject: [PATCH 305/632] Bump once_cell from 1.13.1 to 1.14.0 in /native (#765) Bumps [once_cell](https://github.com/matklad/once_cell) from 1.13.1 to 1.14.0. 
- [Release notes](https://github.com/matklad/once_cell/releases) - [Changelog](https://github.com/matklad/once_cell/blob/master/CHANGELOG.md) - [Commits](https://github.com/matklad/once_cell/compare/v1.13.1...v1.14.0) --- updated-dependencies: - dependency-name: once_cell dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 422b4de3..32f2d4e5 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -383,9 +383,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e" +checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0" [[package]] name = "oorandom" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index ea9d7169..eeb3ff9d 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,7 +35,7 @@ thiserror = "1.0.34" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.0" -once_cell = "1.13.1" +once_cell = "1.14.0" regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } From 667c713b3853a9fc4ee51639feb5c27e41489d7f Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Wed, 14 Sep 2022 21:31:36 +0800 Subject: [PATCH 306/632] Fix the bug that the use of formatter in codemods has undetermined target Python version, resulting in hard-to-reason-with behavior (#771) * When codemod, specify the black formatter to use the same target Python version we use * Fix the `test_codemod_formatter_error_input` unit test * Remove an unused import in `test_codemod_cli` module --- libcst/codemod/tests/test_codemod_cli.py | 8 +++++--- libcst/tool.py | 12 ++++++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 934ae667..dd02e598 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -6,7 +6,6 @@ import subprocess -import sys from pathlib import Path from libcst._parser.entrypoints import is_native @@ -22,13 +21,16 @@ class TestCodemodCLI(UnitTest): "libcst.tool", "codemod", "remove_unused_imports.RemoveUnusedImportsCommand", + # `ArgumentParser.parse_known_args()`'s behavior dictates that options + # need to go after instead of before the codemod command identifier. 
+                "--python-version",
+                "3.6",
                 str(Path(__file__).parent / "codemod_formatter_error_input.py.txt"),
             ],
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
         )
-        version = sys.version_info
-        if version[0] == 3 and version[1] == 6 and not is_native():
+        if not is_native():
             self.assertIn(
                 "ParserSyntaxError: Syntax Error @ 14:11.",
                 rlt.stderr.decode("utf-8"),
diff --git a/libcst/tool.py b/libcst/tool.py
index bac61186..4101d70c 100644
--- a/libcst/tool.py
+++ b/libcst/tool.py
@@ -31,6 +31,7 @@ from libcst import (
     PartialParserConfig,
 )
 from libcst._nodes.deep_equals import deep_equals
+from libcst._parser.parso.utils import parse_version_string
 from libcst.codemod import (
     CodemodCommand,
     CodemodContext,
@@ -538,6 +539,17 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901
     }
     command_instance = command_class(CodemodContext(), **codemod_args)

+    # Specify target version for black formatter
+    if os.path.basename(config["formatter"][0]) in ("black", "black.exe"):
+
+        parsed_version = parse_version_string(args.python_version)
+
+        config["formatter"] = [
+            config["formatter"][0],
+            "--target-version",
+            f"py{parsed_version.major}{parsed_version.minor}",
+        ] + config["formatter"][1:]
+
     # Special case for allowing stdin/stdout. Note that this does not allow for
     # full-repo metadata since there is no path.
     if any(p == "-" for p in args.path):
From 973895a6c0dd2347b250134694f53045c524101e Mon Sep 17 00:00:00 2001
From: MapleCCC
Date: Wed, 14 Sep 2022 21:33:45 +0800
Subject: [PATCH 307/632] Several trivial refactors (#770)

* Enumeration members are singletons. Copying on them would be a no-op

* Avoid generating an unnecessary `pass` statement

* Several trivial refactors

* Avoid building unnecessary intermediate lists, which are a mere slight waste of time and space

* Remove unused import, an oversight from commit 8e6bf9e9

* `collections.abc.Mapping.get()` defaults to returning `None` when the key doesn't exist

* Just use unittest's `assertRaises` to specify expected exception types, instead of catching every possible `Exception`, which could suppress legitimate errors and hide bugs

* We know for sure that the body of `CSTTypedTransformerFunctions` won't be empty, so don't bother with complex formal completeness
---
 libcst/_parser/entrypoints.py | 2 +-
 libcst/_parser/tests/test_parse_errors.py | 3 +--
 libcst/_typed_visitor.py | 2 --
 libcst/_typed_visitor_base.py | 4 +---
 libcst/codegen/gather.py | 10 +++++-----
 libcst/codegen/gen_matcher_classes.py | 2 +-
 libcst/codegen/gen_type_mapping.py | 2 +-
 libcst/codegen/gen_visitor_functions.py | 4 ++--
 libcst/codemod/_cli.py | 2 +-
 libcst/matchers/_decorators.py | 16 ++++------------
 libcst/matchers/_matcher_base.py | 5 ++---
 11 files changed, 19 insertions(+), 33 deletions(-)

diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py
index 461433bd..e47d9fc6 100644
--- a/libcst/_parser/entrypoints.py
+++ b/libcst/_parser/entrypoints.py
@@ -26,7 +26,7 @@ _DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig()
 def is_native() -> bool:
-    typ = os.environ.get("LIBCST_PARSER_TYPE", None)
+    typ = os.environ.get("LIBCST_PARSER_TYPE")
     return typ == "native"

diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py
index b39ea20e..0a058898 100644
--- a/libcst/_parser/tests/test_parse_errors.py
+++ b/libcst/_parser/tests/test_parse_errors.py
@@ -180,6 +180,5 @@ class ParseErrorsTest(UnitTest):
     def test_native_fallible_into_py(self) -> None:
         with
patch("libcst._nodes.expression.Name._validate") as await_validate: await_validate.side_effect = CSTValidationError("validate is broken") - with self.assertRaises(Exception) as e: + with self.assertRaises((SyntaxError, cst.ParserSyntaxError)): cst.parse_module("foo") - self.assertIsInstance(e.exception, (SyntaxError, cst.ParserSyntaxError)) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index f68cb2a8..b63bdeec 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -6155,8 +6155,6 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): - pass - @mark_no_op def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp": return updated_node diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index 3b1bd2db..de751a15 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -3,10 +3,8 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any, Callable, cast, TYPE_CHECKING, TypeVar +from typing import Any, Callable, cast, TypeVar -if TYPE_CHECKING: - from libcst._typed_visitor import CSTTypedBaseFunctions # noqa: F401 # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters. F = TypeVar("F", bound=Callable) diff --git a/libcst/codegen/gather.py b/libcst/codegen/gather.py index 7d7fa8d6..5eeaa7d3 100644 --- a/libcst/codegen/gather.py +++ b/libcst/codegen/gather.py @@ -7,12 +7,12 @@ import inspect from collections import defaultdict from collections.abc import Sequence as ABCSequence from dataclasses import dataclass, fields, replace -from typing import Dict, Generator, List, Mapping, Sequence, Set, Type, Union +from typing import Dict, Iterator, List, Mapping, Sequence, Set, Type, Union import libcst as cst -def _get_bases() -> Generator[Type[cst.CSTNode], None, None]: +def _get_bases() -> Iterator[Type[cst.CSTNode]]: """ Get all base classes that are subclasses of CSTNode but not an actual node itself. This allows us to keep our types sane by refering to the @@ -27,11 +27,11 @@ def _get_bases() -> Generator[Type[cst.CSTNode], None, None]: typeclasses: Sequence[Type[cst.CSTNode]] = sorted( - list(_get_bases()), key=lambda base: base.__name__ + _get_bases(), key=lambda base: base.__name__ ) -def _get_nodes() -> Generator[Type[cst.CSTNode], None, None]: +def _get_nodes() -> Iterator[Type[cst.CSTNode]]: """ Grab all CSTNodes that are not a superclass. Basically, anything that a person might use to generate a tree. 
@@ -53,7 +53,7 @@ def _get_nodes() -> Generator[Type[cst.CSTNode], None, None]: all_libcst_nodes: Sequence[Type[cst.CSTNode]] = sorted( - list(_get_nodes()), key=lambda node: node.__name__ + _get_nodes(), key=lambda node: node.__name__ ) node_to_bases: Dict[Type[cst.CSTNode], List[Type[cst.CSTNode]]] = {} for node in all_libcst_nodes: diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index c8453a5d..776db80b 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -547,7 +547,7 @@ for node in all_libcst_nodes: # Make sure to add an __all__ for flake8 and compatibility with "from libcst.matchers import *" -generated_code.append(f"__all__ = {repr(sorted(list(all_exports)))}") +generated_code.append(f"__all__ = {repr(sorted(all_exports))}") if __name__ == "__main__": diff --git a/libcst/codegen/gen_type_mapping.py b/libcst/codegen/gen_type_mapping.py index a5af2bd9..cc31783d 100644 --- a/libcst/codegen/gen_type_mapping.py +++ b/libcst/codegen/gen_type_mapping.py @@ -29,7 +29,7 @@ generated_code.append("") generated_code.append("") for module, objects in imports.items(): generated_code.append(f"from {module} import (") - generated_code.append(f" {', '.join(sorted(list(objects)))}") + generated_code.append(f" {', '.join(sorted(objects))}") generated_code.append(")") # Generate the base visit_ methods diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index ef369cfa..36d21a5e 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -32,7 +32,7 @@ generated_code.append("") generated_code.append("if TYPE_CHECKING:") for module, objects in imports.items(): generated_code.append(f" from {module} import ( # noqa: F401") - generated_code.append(f" {', '.join(sorted(list(objects)))}") + generated_code.append(f" {', '.join(sorted(objects))}") generated_code.append(" )") @@ -87,7 +87,6 @@ for node in sorted(nodebases.keys(), key=lambda node: node.__name__): generated_code.append("") generated_code.append("") generated_code.append("class CSTTypedTransformerFunctions(CSTTypedBaseFunctions):") -generated_code.append(" pass") for node in sorted(nodebases.keys(), key=lambda node: node.__name__): name = node.__name__ if name.startswith("Base"): @@ -111,6 +110,7 @@ for node in sorted(nodebases.keys(), key=lambda node: node.__name__): ) generated_code.append(" return updated_node") + if __name__ == "__main__": # Output the code print("\n".join(generated_code)) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 7863ac39..91b85c4d 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -557,7 +557,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 # Ensure that we have no duplicates, otherwise we might get race conditions # on write. 
- files = sorted(list({os.path.abspath(f) for f in files})) + files = sorted({os.path.abspath(f) for f in files}) total = len(files) progress = Progress(enabled=not hide_progress, total=total) diff --git a/libcst/matchers/_decorators.py b/libcst/matchers/_decorators.py index c5cde6ea..ea69178f 100644 --- a/libcst/matchers/_decorators.py +++ b/libcst/matchers/_decorators.py @@ -30,12 +30,10 @@ def call_if_inside( """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: - if not hasattr(original, VISIT_POSITIVE_MATCHER_ATTR): - setattr(original, VISIT_POSITIVE_MATCHER_ATTR, []) setattr( original, VISIT_POSITIVE_MATCHER_ATTR, - [*getattr(original, VISIT_POSITIVE_MATCHER_ATTR), matcher], + [*getattr(original, VISIT_POSITIVE_MATCHER_ATTR, []), matcher], ) return original @@ -57,12 +55,10 @@ def call_if_not_inside( """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: - if not hasattr(original, VISIT_NEGATIVE_MATCHER_ATTR): - setattr(original, VISIT_NEGATIVE_MATCHER_ATTR, []) setattr( original, VISIT_NEGATIVE_MATCHER_ATTR, - [*getattr(original, VISIT_NEGATIVE_MATCHER_ATTR), matcher], + [*getattr(original, VISIT_NEGATIVE_MATCHER_ATTR, []), matcher], ) return original @@ -88,12 +84,10 @@ def visit(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: - if not hasattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR): - setattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR, []) setattr( original, CONSTRUCTED_VISIT_MATCHER_ATTR, - [*getattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR), matcher], + [*getattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR, []), matcher], ) return original @@ -116,12 +110,10 @@ def leave(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: - if not hasattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR): - setattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR, []) setattr( original, CONSTRUCTED_LEAVE_MATCHER_ATTR, - [*getattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR), matcher], + [*getattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR, []), matcher], ) return original diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 027c535c..3e984860 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -4,7 +4,6 @@ # LICENSE file in the root directory of this source tree. import collections.abc -import copy import inspect import re from abc import ABCMeta @@ -1831,7 +1830,7 @@ class _ReplaceTransformer(libcst.CSTTransformer): if inspect.isfunction(replacement): self.replacement = replacement elif isinstance(replacement, (MaybeSentinel, RemovalSentinel)): - self.replacement = lambda node, matches: copy.deepcopy(replacement) + self.replacement = lambda node, matches: replacement else: # pyre-ignore We know this is a CSTNode. self.replacement = lambda node, matches: replacement.deep_clone() @@ -1946,7 +1945,7 @@ def replace( """ if isinstance(tree, (RemovalSentinel, MaybeSentinel)): # We can't do any replacements on this, so return the tree exactly. - return copy.deepcopy(tree) + return tree if isinstance(matcher, (AtLeastN, AtMostN)): # We can't match this, since these matchers are forbidden at top level. 
# These are not subclasses of BaseMatcherNode, but in the case that the From c75dbd482cda80c86e841781ace4a153b399c24a Mon Sep 17 00:00:00 2001 From: MapleCCC Date: Wed, 14 Sep 2022 22:22:45 +0800 Subject: [PATCH 308/632] Fix black configuration (#769) * Raise black's output file's target version to 3.7, which is the lowest supported Python version that libcst can be run on * Add to, instead of override, the exclusion rules of black * Fix the bug that files in `stubs/libcst_native/` are inadvertently ignored by black This is due to black's file exclusion mechanism is a file-system-unaware pure-string-based pattern match. We need to prepend "^/" to specify that we are referring to the root-level "native/" folder. Yeah, I know this looks strange, but blame black for it :) . See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format for further reference. * It's conventional to use single-quote literal string to represent regular expression in TOML format, because in this way it doesn't perform any escaping * When codemod, specify the black formatter to use the same target Python version we use * Fix the `test_codemod_formatter_error_input` unit test * Remove an unused import in `test_codemod_cli` module --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 986a2339..b3a63763 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.black] -target-version = ["py36"] -exclude = "native/.*" +target-version = ["py37"] +extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format [tool.ufmt] excludes = ["native/", "stubs/"] From e30922bf0962fcd496790bad2f5962389c24c5b1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Sep 2022 08:46:47 +0100 Subject: [PATCH 309/632] Bump itertools from 0.10.3 to 0.10.5 in /native (#785) Bumps [itertools](https://github.com/rust-itertools/itertools) from 0.10.3 to 0.10.5. - [Release notes](https://github.com/rust-itertools/itertools/releases) - [Changelog](https://github.com/rust-itertools/itertools/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-itertools/itertools/commits) --- updated-dependencies: - dependency-name: itertools dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 32f2d4e5..d3e70c20 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -261,9 +261,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index eeb3ff9d..f7d08bc3 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -34,7 +34,7 @@ pyo3 = { version = "0.17", optional = true } thiserror = "1.0.34" peg = "0.8.0" chic = "1.2.2" -itertools = "0.10.0" +itertools = "0.10.5" once_cell = "1.14.0" regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } From 014605f269e118b7bddfd6b62a6ebde91ecf0269 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Sep 2022 16:46:21 +0100 Subject: [PATCH 310/632] Bump trybuild from 1.0.64 to 1.0.65 in /native (#787) Bumps [trybuild](https://github.com/dtolnay/trybuild) from 1.0.64 to 1.0.65. - [Release notes](https://github.com/dtolnay/trybuild/releases) - [Commits](https://github.com/dtolnay/trybuild/compare/1.0.64...1.0.65) --- updated-dependencies: - dependency-name: trybuild dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index d3e70c20..8914c8a2 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -651,9 +651,9 @@ checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c" [[package]] name = "serde" -version = "1.0.137" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" [[package]] name = "serde_cbor" @@ -667,9 +667,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.137" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -769,9 +769,9 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.64" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f408301c7480f9e6294eb779cfc907f54bd901a9660ef24d7f233ed5376485" +checksum = "9e13556ba7dba80b3c76d1331989a341290c77efcf688eca6c307ee3066383dd" dependencies = [ "glob", "once_cell", From d7e3213281acc02986618232b220b3139793361a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Sep 2022 16:46:33 +0100 Subject: [PATCH 311/632] Bump syn from 1.0.99 to 1.0.101 in /native (#786) 
Bumps [syn](https://github.com/dtolnay/syn) from 1.0.99 to 1.0.101. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/1.0.99...1.0.101) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 8914c8a2..21d67ae7 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -695,9 +695,9 @@ checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" [[package]] name = "syn" -version = "1.0.99" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" +checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2" dependencies = [ "proc-macro2", "quote", From 281b2f206fe03b9754b1616a49eed4af229c58f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Sep 2022 16:47:02 +0100 Subject: [PATCH 312/632] Bump ufmt from 2.0.0 to 2.0.1 (#780) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.0.0 to 2.0.1. - [Release notes](https://github.com/omnilib/ufmt/releases) - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.0.0...v2.0.1) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 8a949c9e..7f12ab7a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,7 @@ prompt-toolkit>=2.0.9 pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 -ufmt==2.0.0 +ufmt==2.0.1 usort==1.0.4 setuptools-rust>=0.12.1 slotscheck>=0.7.1 From 2a8867312864777870b39fbaebd9ff80544988ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 21:06:40 +0100 Subject: [PATCH 313/632] Bump criterion from 0.3.6 to 0.4.0 in /native (#774) Bumps [criterion](https://github.com/bheisler/criterion.rs) from 0.3.6 to 0.4.0. - [Release notes](https://github.com/bheisler/criterion.rs/releases) - [Changelog](https://github.com/bheisler/criterion.rs/blob/master/CHANGELOG.md) - [Commits](https://github.com/bheisler/criterion.rs/compare/0.3.6...0.4.0) --- updated-dependencies: - dependency-name: criterion dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 131 +++++++++++++++++++++++++++++++++++++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 127 insertions(+), 6 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 21d67ae7..1dceef98 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -11,6 +11,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "annotate-snippets" version = "0.6.1" @@ -88,6 +94,33 @@ dependencies = [ "annotate-snippets", ] +[[package]] +name = "ciborium" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369" + +[[package]] +name = "ciborium-ll" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "clap" version = "2.34.0" @@ -95,10 +128,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "bitflags", - "textwrap", + "textwrap 0.11.0", "unicode-width", ] +[[package]] +name = "clap" +version = "3.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86447ad904c7fb335a790c9d7fe3d0d971dc523b8ccd1561a520de9a85302750" +dependencies = [ + "bitflags", + "clap_lex", + "indexmap", + "textwrap 0.15.1", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + [[package]] name = "criterion" version = "0.3.6" @@ -107,8 +161,8 @@ checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ "atty", "cast 0.3.0", - "clap", - "criterion-plot", + "clap 2.34.0", + "criterion-plot 0.4.4", "csv", "itertools", "lazy_static", @@ -125,13 +179,39 @@ dependencies = [ "walkdir", ] +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast 0.3.0", + "ciborium", + "clap 3.2.22", + "criterion-plot 0.5.0", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + [[package]] name = "criterion-cycles-per-byte" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d34485a578330c7a91ccf064674f3739a7aebbf3b9d7fd498a6d3e8f7473c96" dependencies = [ - "criterion", + "criterion 0.3.6", ] [[package]] @@ -144,6 +224,16 @@ dependencies = [ "itertools", ] +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast 0.3.0", + "itertools", +] + [[package]] name = "crossbeam-channel" version = "0.5.5" @@ -235,6 +325,12 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hermit-abi" version = "0.1.19" @@ -244,6 +340,16 @@ dependencies = [ "libc", ] +[[package]] +name = "indexmap" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" +dependencies = [ + "autocfg", + "hashbrown", +] + [[package]] name = "indoc" version = "1.0.6" @@ -306,7 +412,7 @@ name = "libcst" version = "0.1.0" dependencies = [ "chic", - "criterion", + "criterion 0.4.0", "criterion-cycles-per-byte", "difference", "itertools", @@ -393,6 +499,12 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +[[package]] +name = "os_str_bytes" +version = "6.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" + [[package]] name = "parking_lot" version = "0.11.2" @@ -654,6 +766,9 @@ name = "serde" version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" +dependencies = [ + "serde_derive", +] [[package]] name = "serde_cbor" @@ -728,6 +843,12 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "textwrap" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "949517c0cf1bf4ee812e2e07e08ab448e3ae0d23472aee8a06c985f0c8815b16" + [[package]] name = "thiserror" version = "1.0.34" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index f7d08bc3..81ea6c00 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -40,7 +40,7 @@ regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] -criterion = { version = "0.3.6", features = ["html_reports"] } +criterion = { version = "0.4.0", features = ["html_reports"] } criterion-cycles-per-byte = "0.1" difference = "2.0.0" From acec81f2384fbb262b64cf4ebc811a752110df63 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 21:06:48 +0100 Subject: [PATCH 314/632] Bump usort from 1.0.4 to 1.0.5 (#781) Bumps [usort](https://github.com/facebook/usort) from 1.0.4 to 1.0.5. - [Release notes](https://github.com/facebook/usort/releases) - [Changelog](https://github.com/facebook/usort/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/usort/compare/v1.0.4...v1.0.5) --- updated-dependencies: - dependency-name: usort dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7f12ab7a..eb4211ab 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -13,7 +13,7 @@ pyre-check==0.9.9; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 ufmt==2.0.1 -usort==1.0.4 +usort==1.0.5 setuptools-rust>=0.12.1 slotscheck>=0.7.1 jinja2==3.1.2 From b61013d5a957a66c344bba8c2f43f8a16ac8a7df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Oct 2022 08:38:40 +0100 Subject: [PATCH 315/632] Bump thiserror from 1.0.34 to 1.0.37 in /native (#790) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.34 to 1.0.37. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.34...1.0.37) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 1dceef98..b53e44c7 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -851,18 +851,18 @@ checksum = "949517c0cf1bf4ee812e2e07e08ab448e3ae0d23472aee8a06c985f0c8815b16" [[package]] name = "thiserror" -version = "1.0.34" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c1b05ca9d106ba7d2e31a9dab4a64e7be2cce415321966ea3132c49a656e252" +checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.34" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f2591983642de85c921015f3f070c665a197ed69e417af436115e3a1407487" +checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 81ea6c00..2e025b61 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -31,7 +31,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.9" pyo3 = { version = "0.17", optional = true } -thiserror = "1.0.34" +thiserror = "1.0.37" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.5" From cd0988d4e7d23d82116dc6bf1e72a8009605a121 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Oct 2022 08:38:50 +0100 Subject: [PATCH 316/632] Bump once_cell from 1.14.0 to 1.15.0 in /native (#789) Bumps [once_cell](https://github.com/matklad/once_cell) from 1.14.0 to 1.15.0. - [Release notes](https://github.com/matklad/once_cell/releases) - [Changelog](https://github.com/matklad/once_cell/blob/master/CHANGELOG.md) - [Commits](https://github.com/matklad/once_cell/compare/v1.14.0...v1.15.0) --- updated-dependencies: - dependency-name: once_cell dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index b53e44c7..5f198494 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -489,9 +489,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "oorandom" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 2e025b61..21005f5d 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,7 +35,7 @@ thiserror = "1.0.37" peg = "0.8.0" chic = "1.2.2" itertools = "0.10.5" -once_cell = "1.14.0" +once_cell = "1.15.0" regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } From 910d7923d3c55e2361ae4bca78fe3e4f3c2aa220 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 08:40:39 +0100 Subject: [PATCH 317/632] Bump black from 22.8.0 to 22.10.0 (#796) Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index eb4211ab..5877a35f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -black==22.8.0 +black==22.10.0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8,<5 From ce33ed31e8983e0f56f5af2ab220714f9503dd2c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 08:40:53 +0100 Subject: [PATCH 318/632] Bump trybuild from 1.0.65 to 1.0.71 in /native (#795) Bumps [trybuild](https://github.com/dtolnay/trybuild) from 1.0.65 to 1.0.71. - [Release notes](https://github.com/dtolnay/trybuild/releases) - [Commits](https://github.com/dtolnay/trybuild/compare/1.0.65...1.0.71) --- updated-dependencies: - dependency-name: trybuild dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 5f198494..2266b16f 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -890,9 +890,9 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.65" +version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e13556ba7dba80b3c76d1331989a341290c77efcf688eca6c307ee3066383dd" +checksum = "ea496675d71016e9bc76aa42d87f16aefd95447cc5818e671e12b2d7e269075d" dependencies = [ "glob", "once_cell", From 810edaece9854643271b8842a7c3bdd30fd0840a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 08:41:17 +0100 Subject: [PATCH 319/632] Bump pyo3 from 0.17.1 to 0.17.2 in /native (#794) Bumps [pyo3](https://github.com/pyo3/pyo3) from 0.17.1 to 0.17.2. - [Release notes](https://github.com/pyo3/pyo3/releases) - [Changelog](https://github.com/PyO3/pyo3/blob/main/CHANGELOG.md) - [Commits](https://github.com/pyo3/pyo3/compare/v0.17.1...v0.17.2) --- updated-dependencies: - dependency-name: pyo3 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 2266b16f..398d398c 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -602,9 +602,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.17.1" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f72538a0230791398a0986a6518ebd88abc3fded89007b506ed072acc831e1" +checksum = "201b6887e5576bf2f945fe65172c1fcbf3fcf285b23e4d71eb171d9736e38d32" dependencies = [ "cfg-if", "indoc", @@ -619,9 +619,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.17.1" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4cf18c20f4f09995f3554e6bcf9b09bd5e4d6b67c562fdfaafa644526ba479" +checksum = "bf0708c9ed01692635cbf056e286008e5a2927ab1a5e48cdd3aeb1ba5a6fef47" dependencies = [ "once_cell", "target-lexicon", @@ -629,9 +629,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.17.1" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41877f28d8ebd600b6aa21a17b40c3b0fc4dfe73a27b6e81ab3d895e401b0e9" +checksum = "90352dea4f486932b72ddf776264d293f85b79a1d214de1d023927b41461132d" dependencies = [ "libc", "pyo3-build-config", @@ -639,9 +639,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.17.1" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e81c8d4bcc2f216dc1b665412df35e46d12ee8d3d046b381aad05f1fcf30547" +checksum = "7eb24b804a2d9e88bfcc480a5a6dd76f006c1e3edaf064e8250423336e2cd79d" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -651,9 +651,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.17.1" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85752a767ee19399a78272cc2ab625cd7d373b2e112b4b13db28de71fa892784" +checksum = 
"f22bb49f6a7348c253d7ac67a6875f2dc65f36c2ae64a82c381d528972bea6d6" dependencies = [ "proc-macro2", "quote", From 29a3ddfb4dc5961e57877aed62c7f156d9cc86c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 08:41:34 +0100 Subject: [PATCH 320/632] Bump syn from 1.0.101 to 1.0.102 in /native (#793) Bumps [syn](https://github.com/dtolnay/syn) from 1.0.101 to 1.0.102. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/1.0.101...1.0.102) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 398d398c..b969654b 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -593,9 +593,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.40" +version = "1.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" +checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b" dependencies = [ "unicode-ident", ] @@ -810,9 +810,9 @@ checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" [[package]] name = "syn" -version = "1.0.101" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2" +checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" dependencies = [ "proc-macro2", "quote", From a7733f6c59f225937fb5635cabea5c3ea6df995a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 08:41:50 +0100 Subject: [PATCH 321/632] Bump peg from 0.8.0 to 0.8.1 in /native (#783) Bumps [peg](https://github.com/kevinmehall/rust-peg) from 0.8.0 to 0.8.1. - [Release notes](https://github.com/kevinmehall/rust-peg/releases) - [Commits](https://github.com/kevinmehall/rust-peg/compare/0.8.0...0.8.1) --- updated-dependencies: - dependency-name: peg dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ native/libcst/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index b969654b..0d7ccd66 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -538,9 +538,9 @@ checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "peg" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af728fe826811af3b38c37e93de6d104485953ea373d656eebae53d6987fcd2c" +checksum = "a07f2cafdc3babeebc087e499118343442b742cc7c31b4d054682cc598508554" dependencies = [ "peg-macros", "peg-runtime", @@ -548,9 +548,9 @@ dependencies = [ [[package]] name = "peg-macros" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4536be147b770b824895cbad934fccce8e49f14b4c4946eaa46a6e4a12fcdc16" +checksum = "4a90084dc05cf0428428e3d12399f39faad19b0909f64fb9170c9fdd6d9cd49b" dependencies = [ "peg-runtime", "proc-macro2", @@ -559,9 +559,9 @@ dependencies = [ [[package]] name = "peg-runtime" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9b0efd3ba03c3a409d44d60425f279ec442bcf0b9e63ff4e410da31c8b0f69f" +checksum = "9fa00462b37ead6d11a82c9d568b26682d78e0477dc02d1966c013af80969739" [[package]] name = "plotters" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 21005f5d..74288131 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -32,7 +32,7 @@ trace = ["peg/trace"] paste = "1.0.9" pyo3 = { version = "0.17", optional = true } thiserror = "1.0.37" -peg = "0.8.0" +peg = "0.8.1" chic = "1.2.2" itertools = "0.10.5" once_cell = "1.15.0" From c606585672ec3b4ae4ca05ac20782cda8f183b37 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Nov 2022 15:38:13 +0000 Subject: [PATCH 322/632] Bump syn from 1.0.102 to 1.0.103 in /native (#799) Bumps [syn](https://github.com/dtolnay/syn) from 1.0.102 to 1.0.103. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/1.0.102...1.0.103) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 0d7ccd66..562180d1 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -810,9 +810,9 @@ checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" [[package]] name = "syn" -version = "1.0.102" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" +checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" dependencies = [ "proc-macro2", "quote", From 0ef632811abd7f94d4e48073c552441b4f6285af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Nov 2022 16:25:27 +0000 Subject: [PATCH 323/632] Bump once_cell from 1.15.0 to 1.16.0 in /native (#802) Bumps [once_cell](https://github.com/matklad/once_cell) from 1.15.0 to 1.16.0. - [Release notes](https://github.com/matklad/once_cell/releases) - [Changelog](https://github.com/matklad/once_cell/blob/master/CHANGELOG.md) - [Commits](https://github.com/matklad/once_cell/compare/v1.15.0...v1.16.0) --- updated-dependencies: - dependency-name: once_cell dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 562180d1..2e618c30 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -489,9 +489,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" +checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" [[package]] name = "oorandom" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 74288131..67778bb9 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,7 +35,7 @@ thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" itertools = "0.10.5" -once_cell = "1.15.0" +once_cell = "1.16.0" regex = "1.6.0" libcst_derive = { path = "../libcst_derive" } From 1e88f1ed428f87793c9ed6eff735af3484118eeb Mon Sep 17 00:00:00 2001 From: Vincent Fazio Date: Wed, 2 Nov 2022 11:25:47 -0500 Subject: [PATCH 324/632] Python 3.11 wheels (#801) * [ci] narrow python 3.11 version window Also, quote the versions for consistency. Signed-off-by: Vincent Fazio * [ci] bump cibuildwheel to 2.11.2 Newer versions support building 3.11 wheels automatically, so just take the latest currently available. 
Signed-off-by: Vincent Fazio Signed-off-by: Vincent Fazio --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 530b39ad..f60909e6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -60,7 +60,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.3.1 + uses: pypa/cibuildwheel@v2.11.2 - uses: actions/upload-artifact@v2 with: path: wheelhouse/*.whl diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ea3e393d..76cf49a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,12 +14,12 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.7, 3.8, 3.9, "3.10", "3.11.0-alpha - 3.11"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] parser: [pure, native] exclude: # skip these builds until https://bugs.launchpad.net/lxml/+bug/1977998 is resolved - os: windows-latest - python-version: "3.11.0-alpha - 3.11" + python-version: "3.11" steps: - uses: actions/checkout@v1 - uses: actions/setup-python@v2 From c105fd33bacaca64458c4f9e56af1bf4803f6a4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 10:46:10 +0000 Subject: [PATCH 325/632] Bump regex from 1.6.0 to 1.7.0 in /native (#806) Bumps [regex](https://github.com/rust-lang/regex) from 1.6.0 to 1.7.0. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.6.0...1.7.0) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 2e618c30..fd6779c2 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -704,9 +704,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" +checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" dependencies = [ "aho-corasick", "memchr", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 67778bb9..e068fefa 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ peg = "0.8.1" chic = "1.2.2" itertools = "0.10.5" once_cell = "1.16.0" -regex = "1.6.0" +regex = "1.7.0" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] From bd4f541f2c19af19c0add477d39014760ddf3dc2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 10:46:35 +0000 Subject: [PATCH 326/632] Bump pyo3 from 0.17.2 to 0.17.3 in /native (#805) Bumps [pyo3](https://github.com/pyo3/pyo3) from 0.17.2 to 0.17.3. 
- [Release notes](https://github.com/pyo3/pyo3/releases) - [Changelog](https://github.com/PyO3/pyo3/blob/main/CHANGELOG.md) - [Commits](https://github.com/pyo3/pyo3/compare/v0.17.2...v0.17.3) --- updated-dependencies: - dependency-name: pyo3 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index fd6779c2..10c8596f 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -602,9 +602,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.17.2" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201b6887e5576bf2f945fe65172c1fcbf3fcf285b23e4d71eb171d9736e38d32" +checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543" dependencies = [ "cfg-if", "indoc", @@ -619,9 +619,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.17.2" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf0708c9ed01692635cbf056e286008e5a2927ab1a5e48cdd3aeb1ba5a6fef47" +checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8" dependencies = [ "once_cell", "target-lexicon", @@ -629,9 +629,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.17.2" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90352dea4f486932b72ddf776264d293f85b79a1d214de1d023927b41461132d" +checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc" dependencies = [ "libc", "pyo3-build-config", @@ -639,9 +639,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.17.2" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb24b804a2d9e88bfcc480a5a6dd76f006c1e3edaf064e8250423336e2cd79d" +checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -651,9 +651,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.17.2" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f22bb49f6a7348c253d7ac67a6875f2dc65f36c2ae64a82c381d528972bea6d6" +checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f" dependencies = [ "proc-macro2", "quote", From c44b182e88fb5135935c01a6fbec260597780b16 Mon Sep 17 00:00:00 2001 From: Matthew Shaer Date: Thu, 10 Nov 2022 09:38:32 +0000 Subject: [PATCH 327/632] Adding a provider which can tell what accessor to use to go from the parent to that child node (#807) --- libcst/metadata/__init__.py | 2 + libcst/metadata/accessor_provider.py | 19 ++++++ .../metadata/tests/test_accessor_provider.py | 68 +++++++++++++++++++ 3 files changed, 89 insertions(+) create mode 100644 libcst/metadata/accessor_provider.py create mode 100644 libcst/metadata/tests/test_accessor_provider.py diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index 75e38229..66e7e525 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -5,6 +5,7 @@ from libcst._position import CodePosition, CodeRange +from libcst.metadata.accessor_provider import AccessorProvider from libcst.metadata.base_provider import ( BaseMetadataProvider, 
BatchableMetadataProvider, @@ -86,6 +87,7 @@ __all__ = [ "Accesses", "TypeInferenceProvider", "FullRepoManager", + "AccessorProvider", # Experimental APIs: "ExperimentalReentrantCodegenProvider", "CodegenPartial", diff --git a/libcst/metadata/accessor_provider.py b/libcst/metadata/accessor_provider.py new file mode 100644 index 00000000..5d4f22e4 --- /dev/null +++ b/libcst/metadata/accessor_provider.py @@ -0,0 +1,19 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + + +import dataclasses + +import libcst as cst + +from libcst.metadata.base_provider import VisitorMetadataProvider + + +class AccessorProvider(VisitorMetadataProvider[str]): + def on_visit(self, node: cst.CSTNode) -> bool: + for f in dataclasses.fields(node): + child = getattr(node, f.name) + self.set_metadata(child, f.name) + return True diff --git a/libcst/metadata/tests/test_accessor_provider.py b/libcst/metadata/tests/test_accessor_provider.py new file mode 100644 index 00000000..6ccfad5e --- /dev/null +++ b/libcst/metadata/tests/test_accessor_provider.py @@ -0,0 +1,68 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import dataclasses + +from textwrap import dedent + +import libcst as cst +from libcst.metadata import AccessorProvider, MetadataWrapper +from libcst.testing.utils import data_provider, UnitTest + + +class DependentVisitor(cst.CSTVisitor): + METADATA_DEPENDENCIES = (AccessorProvider,) + + def __init__(self, *, test: UnitTest) -> None: + self.test = test + + def on_visit(self, node: cst.CSTNode) -> bool: + for f in dataclasses.fields(node): + child = getattr(node, f.name) + if type(child) is cst.CSTNode: + accessor = self.get_metadata(AccessorProvider, child) + self.test.assertEqual(accessor, f.name) + + return True + + +class AccessorProviderTest(UnitTest): + @data_provider( + ( + ( + """ + foo = 'toplevel' + fn1(foo) + fn2(foo) + def fn_def(): + foo = 'shadow' + fn3(foo) + """, + ), + ( + """ + global_var = None + @cls_attr + class Cls(cls_attr, kwarg=cls_attr): + cls_attr = 5 + def f(): + pass + """, + ), + ( + """ + iterator = None + condition = None + [elt for target in iterator if condition] + {elt for target in iterator if condition} + {elt: target for target in iterator if condition} + (elt for target in iterator if condition) + """, + ), + ) + ) + def test_accessor_provier(self, code: str) -> None: + wrapper = MetadataWrapper(cst.parse_module(dedent(code))) + wrapper.visit(DependentVisitor(test=self)) From 94e607070d727d6ede3ab35fc8dc9ff6c8382a77 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Nov 2022 10:12:14 +0000 Subject: [PATCH 328/632] Auto-update github actions using dependabot --- .github/dependabot.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index eb037e18..40738c8d 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,3 +11,8 @@ updates: directory: "/native" schedule: interval: weekly + + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: weekly From ceb4619da52b3c21438539237df0c765ad267d07 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Nov 2022 10:12:55 +0000 Subject: [PATCH 329/632] Bump setuptools-rust version (#809) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 5877a35f..09bcd66b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,6 +14,6 @@ setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 ufmt==2.0.1 usort==1.0.5 -setuptools-rust>=0.12.1 +setuptools-rust>=1.5.2 slotscheck>=0.7.1 jinja2==3.1.2 From 95e65a4022d7413f55f2a11d08858cabb70c4611 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Nov 2022 10:13:22 +0000 Subject: [PATCH 330/632] Remove deprecated ::set-output directive (#808) --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f60909e6..d8c43c2c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -18,7 +18,7 @@ jobs: matrix=$(jq --arg ref "${{ github.ref }}" \ 'map(select(.on_ref_regex as $pat | $pat == null or ($ref | test($pat))) | del(.on_ref_regex))' \ .github/build-matrix.json) - echo ::set-output name=matrix::{\"include\":$(echo $matrix)}\" + echo matrix={\"include\":$(echo $matrix)}\" >> $GITHUB_OUTPUT build: name: Build wheels on ${{ join(matrix.os, '/') }}/${{ matrix.vers }} From fc6e0c6a64a08c1c7c760317e62665f54b3b490f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Nov 2022 10:27:36 +0000 Subject: [PATCH 331/632] Bump actions/upload-artifact from 2 to 3 (#811) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 2 to 3. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v2...v3) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d8c43c2c..aec09d18 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -61,7 +61,7 @@ jobs: echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels uses: pypa/cibuildwheel@v2.11.2 - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl name: wheels diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76cf49a4..2fd78160 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -122,7 +122,7 @@ jobs: fail_ci_if_error: true verbose: true - name: Archive Coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage.xml @@ -147,7 +147,7 @@ jobs: - uses: ts-graphviz/setup-graphviz@v1 - run: sphinx-build docs/source/ docs/build/ - name: Archive Docs - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: sphinx-docs path: docs/build From b62ce9218f9304701f2ccb0abda0f6c21f175e5b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Nov 2022 21:50:54 +0000 Subject: [PATCH 332/632] Bump codecov/codecov-action from 2 to 3 (#812) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 2 to 3. 
- [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v2...v3) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2fd78160..674b9317 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -116,7 +116,7 @@ jobs: run: | coverage run setup.py test coverage xml -i - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 with: files: coverage.xml fail_ci_if_error: true From ede2616ff214100584b7372c83331bfb23f8555c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Nov 2022 21:51:18 +0000 Subject: [PATCH 333/632] Bump actions/download-artifact from 2 to 3 (#815) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 2 to 3. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v2...v3) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/pypi_upload.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index ba7baf2e..ce22f3eb 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -20,7 +20,7 @@ jobs: - uses: actions/checkout@v1 - name: Download binary wheels id: download - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: wheels path: wheelhouse From ff01b86786e3b1332089ad630b5f980fe23172eb Mon Sep 17 00:00:00 2001 From: Andrey Semakin Date: Fri, 11 Nov 2022 14:00:42 +0500 Subject: [PATCH 334/632] Add py3.11 classifier (#816) --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 294f1b32..0275f53a 100644 --- a/setup.py +++ b/setup.py @@ -70,6 +70,7 @@ setuptools.setup( "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ], zip_safe=False, # for mypy compatibility https://mypy.readthedocs.io/en/latest/installed_packages.html ) From a284947b8f54cd7cf78940d75df8a257a3de8186 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Nov 2022 09:04:21 +0000 Subject: [PATCH 335/632] Bump actions/checkout from 1 to 3 (#810) Bumps [actions/checkout](https://github.com/actions/checkout) from 1 to 3. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v1...v3) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
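The AccessorProvider added in PATCH 327 above records, for every child node, the name of the dataclass field on its parent that holds it. A minimal usage sketch (the sample module and printed output are illustrative only, not part of any patch in this series):

```python
import libcst as cst
from libcst.metadata import AccessorProvider, MetadataWrapper

# Resolving the provider yields a mapping from each child node (or child
# field value) to the name of the field on its parent that holds it, e.g. a
# function's IndentedBlock is reachable from its FunctionDef via "body".
wrapper = MetadataWrapper(cst.parse_module("def f():\n    pass\n"))
for child, field_name in wrapper.resolve(AccessorProvider).items():
    print(type(child).__name__, field_name)
```
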
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 4 ++-- .github/workflows/ci.yml | 14 +++++++------- .github/workflows/pypi_upload.yml | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index aec09d18..f6b9e4fa 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,7 +10,7 @@ jobs: outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - id: set-matrix # match github.ref to the on_ref_regex field in the json # to skip running linux/aarch64 builds on PRs @@ -39,7 +39,7 @@ jobs: CIBW_ARCHS: ${{ matrix.vers }} CIBW_BUILD_VERBOSITY: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 if: ${{ !contains(matrix.os, 'self-hosted') }} with: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 674b9317..98c2b9e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: - os: windows-latest python-version: "3.11" steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -52,7 +52,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: "3.10" @@ -75,7 +75,7 @@ jobs: typecheck: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: "3.10" @@ -99,7 +99,7 @@ jobs: coverage: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: "3.10" @@ -131,7 +131,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: "3.10" @@ -161,7 +161,7 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions-rs/toolchain@v1 with: toolchain: stable @@ -194,7 +194,7 @@ jobs: name: Rustfmt runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions-rs/toolchain@v1 with: profile: minimal diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index ce22f3eb..6b6c313b 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - name: Download binary wheels id: download uses: actions/download-artifact@v3 From 667c0c3e14c75fd9fee2d41933ee80b7f03b4db1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Nov 2022 10:39:48 +0000 Subject: [PATCH 336/632] Bump actions/setup-python from 2 to 4 (#819) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 2 to 4. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v2...v4) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/pypi_upload.yml | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f6b9e4fa..5fd27906 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -40,7 +40,7 @@ jobs: CIBW_BUILD_VERBOSITY: 1 steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 if: ${{ !contains(matrix.os, 'self-hosted') }} with: python-version: "3.10" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 98c2b9e0..13acbf48 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: python-version: "3.11" steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - uses: actions/cache@v2 @@ -53,7 +53,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - uses: actions/cache@v2 @@ -76,7 +76,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - uses: actions/cache@v2 @@ -100,7 +100,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - uses: actions/cache@v2 @@ -132,7 +132,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - uses: actions/cache@v2 @@ -170,7 +170,7 @@ jobs: uses: Swatinem/rust-cache@v1.3.0 with: working-directory: native - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - name: test diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 6b6c313b..5673e113 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -24,7 +24,7 @@ jobs: with: name: wheels path: wheelhouse - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - uses: actions/cache@v2 From 987aff666466b724600a3e1c51f98f8560ceb323 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Nov 2022 10:40:06 +0000 Subject: [PATCH 337/632] Bump actions/cache from 2 to 3 (#820) Bumps [actions/cache](https://github.com/actions/cache) from 2 to 3. - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v2...v3) --- updated-dependencies: - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 10 +++++----- .github/workflows/pypi_upload.yml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5fd27906..7e6420f4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: ${{ !contains(matrix.os, 'self-hosted') }} with: python-version: "3.10" - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache if: ${{ !contains(matrix.os, 'self-hosted') }} with: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 13acbf48..929d8d90 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,7 +25,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} @@ -56,7 +56,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} @@ -79,7 +79,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} @@ -103,7 +103,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} @@ -135,7 +135,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 5673e113..963e04eb 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -27,7 +27,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - uses: actions/cache@v2 + - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} From f668e88dd20b185fc79bedc4c8b3a237dd52f66d Mon Sep 17 00:00:00 2001 From: Carl Meyer Date: Tue, 29 Nov 2022 15:24:24 -0700 Subject: [PATCH 338/632] fix PEP 604 union annotations in decorators (#828) --- libcst/matchers/_visitors.py | 20 +++++++++++++------- libcst/matchers/tests/test_decorators.py | 15 ++++----------- 2 files changed, 17 insertions(+), 18 deletions(-) diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index ded6eb9d..a314fc4d 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -45,6 +45,15 @@ from libcst.matchers._matcher_base import ( ) from libcst.matchers._return_types import TYPED_FUNCTION_RETURN_MAPPING +try: + # PEP 604 unions, in Python 3.10+ + from types import UnionType +except ImportError: + # We use this for isinstance; no annotation will be an instance of this + class UnionType: + pass + + CONCRETE_METHODS: Set[str] = { *{f"visit_{cls.__name__}" for cls in TYPED_FUNCTION_RETURN_MAPPING}, *{f"leave_{cls.__name__}" for cls in TYPED_FUNCTION_RETURN_MAPPING}, @@ -78,18 +87,15 @@ def _get_possible_match_classes(matcher: BaseMatcherNode) -> List[Type[cst.CSTNo return [getattr(cst, matcher.__class__.__name__)] -def _annotation_looks_like_union(annotation: object) -> bool: - if getattr(annotation, "__origin__", None) is Union: - return True - # support 
PEP-604 style unions introduced in Python 3.10 +def _annotation_is_union(annotation: object) -> bool: return ( - annotation.__class__.__name__ == "Union" - and annotation.__class__.__module__ == "types" + isinstance(annotation, UnionType) + or getattr(annotation, "__origin__", None) is Union ) def _get_possible_annotated_classes(annotation: object) -> List[Type[object]]: - if _annotation_looks_like_union(annotation): + if _annotation_is_union(annotation): return getattr(annotation, "__args__", []) else: return [cast(Type[object], annotation)] diff --git a/libcst/matchers/tests/test_decorators.py b/libcst/matchers/tests/test_decorators.py index 7486cee8..8b28657c 100644 --- a/libcst/matchers/tests/test_decorators.py +++ b/libcst/matchers/tests/test_decorators.py @@ -3,10 +3,11 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import sys from ast import literal_eval from textwrap import dedent from typing import List, Set -from unittest.mock import Mock +from unittest import skipIf import libcst as cst import libcst.matchers as m @@ -996,22 +997,14 @@ class MatchersVisitLeaveDecoratorsTest(UnitTest): self.assertEqual(visitor.visits, ['"baz"']) -# This is meant to simulate `cst.ImportFrom | cst.RemovalSentinel` in py3.10 -FakeUnionClass: Mock = Mock() -setattr(FakeUnionClass, "__name__", "Union") -setattr(FakeUnionClass, "__module__", "types") -FakeUnion: Mock = Mock() -FakeUnion.__class__ = FakeUnionClass -FakeUnion.__args__ = [cst.ImportFrom, cst.RemovalSentinel] - - class MatchersUnionDecoratorsTest(UnitTest): + @skipIf(bool(sys.version_info < (3, 10)), "new union syntax not available") def test_init_with_new_union_annotation(self) -> None: class TransformerWithUnionReturnAnnotation(m.MatcherDecoratableTransformer): @m.leave(m.ImportFrom(module=m.Name(value="typing"))) def test( self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom - ) -> FakeUnion: + ) -> cst.ImportFrom | cst.RemovalSentinel: pass # assert that init (specifically _check_types on return annotation) passes From bfd8e495acafb0b8d459a50ed67770dac2d92f90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Jan 2023 13:50:19 +0000 Subject: [PATCH 339/632] Bump pypa/cibuildwheel from 2.11.2 to 2.12.0 (#857) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.11.2 to 2.12.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.11.2...v2.12.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7e6420f4..5f2457ae 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -60,7 +60,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.11.2 + uses: pypa/cibuildwheel@v2.12.0 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From de28541fa322e466e48fc876c0d3bdcc2c1a5759 Mon Sep 17 00:00:00 2001 From: Sagar Badiyani <34768752+sagarbadiyani@users.noreply.github.com> Date: Tue, 24 Jan 2023 19:21:56 +0530 Subject: [PATCH 340/632] [AddImportsVisitor] Docstring Check Only for the Top Element of the Body (#841) * Initial Commit * lint fix --- libcst/codemod/visitors/_add_imports.py | 2 +- .../visitors/tests/test_add_imports.py | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index a3ca32f9..9c4f04c5 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -283,7 +283,7 @@ class AddImportsVisitor(ContextAwareTransformer): # original tree but break up the statements of the modified tree. If we # change this assumption in this visitor, we will have to change this code. for i, statement in enumerate(orig_module.body): - if m.matches( + if i == 0 and m.matches( statement, m.SimpleStatementLine(body=[m.Expr(value=m.SimpleString())]) ): statement_before_import_location = import_add_location = 1 diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index e7a191a9..68437522 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -891,3 +891,36 @@ class TestAddImportsCodemod(CodemodTest): full_module_name="a.b.foobar", full_package_name="a.b" ), ) + + def test_import_in_module_with_standalone_string_not_a_docstring( + self, + ) -> None: + """ + The import should be added after the __future__ imports. + """ + before = """ + from __future__ import annotations + from __future__ import division + + '''docstring.''' + def func(): + pass + """ + after = """ + from __future__ import annotations + from __future__ import division + import typing + + '''docstring.''' + def func(): + pass + """ + + self.assertCodemod( + before, + after, + [ImportItem("typing", None, None)], + context_override=CodemodContext( + full_module_name="a.b.foobar", full_package_name="a.b" + ), + ) From 1ee04c6ce56e75eba18112c5d95f8eb16487b8f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Jan 2023 13:53:37 +0000 Subject: [PATCH 341/632] Bump black from 22.10.0 to 22.12.0 (#832) Bumps [black](https://github.com/psf/black) from 22.10.0 to 22.12.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/22.10.0...22.12.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 09bcd66b..d34c666a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -black==22.10.0 +black==22.12.0 coverage>=4.5.4 fixit==0.1.1 flake8>=3.7.8,<5 From b5c34d39a0400ff4b8b1e668c3d1a7d9502a0eee Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 16 Feb 2023 10:49:05 -0800 Subject: [PATCH 342/632] Fix Github issue 855 - fail to parse with statement (#861) * Fix Github issue 855 - fail to parse with statement When we added support for parenthesized with statements, the grammar on the with itself was correct (it's a right and left parenthesis around a comma-separated list of with-items, with a possible trailing comma). But inside of the "as" variation of the with_item rule we have a peek at the next character, which was allowing for a comma or a colon. That peek needs to also accept right parentheses - otherwise, if the last item contains an `as` and has no trailing comma we fail to parse. The bug is exercisecd by, for example, this code snippet: ``` with (foo, bar as bar,): pass ``` The with_wickedness test fixture has been revised to include both the plain and async variations of this example snippet with and without trailing comma, and tests pass after the peek rule fix. * Add more tests covering the plain expression form of `with_item` --- native/libcst/src/parser/grammar.rs | 2 +- .../libcst/tests/fixtures/with_wickedness.py | 45 +++++++++++++++++-- 2 files changed, 43 insertions(+), 4 deletions(-) diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index decf983b..14622c1a 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -516,7 +516,7 @@ parser! { } rule with_item() -> WithItem<'input, 'a> - = e:expression() a:lit("as") t:star_target() &(lit(",") / lit(":")) { + = e:expression() a:lit("as") t:star_target() &(lit(",") / lit(":") / rpar()) { make_with_item(e, Some(a), Some(t)) } / e:expression() { diff --git a/native/libcst/tests/fixtures/with_wickedness.py b/native/libcst/tests/fixtures/with_wickedness.py index 7cb5c67d..ee6ff7b6 100644 --- a/native/libcst/tests/fixtures/with_wickedness.py +++ b/native/libcst/tests/fixtures/with_wickedness.py @@ -1,13 +1,52 @@ # with_wickedness -with foo : ... 
+with foo : + pass + +with foo, bar: + pass + +with (foo, bar): + pass + +with (foo, bar,): + pass + +with foo, bar as bar: + pass + +with (foo, bar as bar): + pass + +with (foo, bar as bar,): + pass async def f(): - async with foo as bar: + async with foo: with bar: pass - async with foo(1+1) as bar , 1 as (a, b, ) , 2 as [a, b] , 3 as a[b] : + async with foo : pass + async with foo, bar: + pass + + async with (foo, bar): + pass + + async with (foo, bar,): + pass + + async with foo, bar as bar: + pass + + async with (foo, bar as bar): + pass + + async with (foo, bar as bar,): + pass + + async with foo(1+1) as bar , 1 as (a, b, ) , 2 as [a, b] , 3 as a[b] : + pass From 8aebbb612148a8d4b8728b84bf956adff911021a Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Fri, 17 Feb 2023 14:16:03 -0800 Subject: [PATCH 343/632] Ignore common virtualenv names (#863) --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 9bb9370a..faef172f 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,5 @@ libcst/_version.py .hypothesis/ .python-version target/ +venv/ +.venv/ From 944ff159f667c6ea942b4b1f60fadbe6dda2175d Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Tue, 21 Feb 2023 18:47:09 -0800 Subject: [PATCH 344/632] Add setuptools-rust to build requirements in setup.py (#873) Hoping this resolves the CI failures on 3.11 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0275f53a..b6aeb400 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ setuptools.setup( }, test_suite="libcst", python_requires=">=3.7", - setup_requires=["setuptools_scm"], + setup_requires=["setuptools-rust", "setuptools_scm"], install_requires=[dep.strip() for dep in open("requirements.txt").readlines()], extras_require={ "dev": [ From d94687e37880c21234cd70d39b242b0be56b6dee Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Wed, 22 Feb 2023 12:35:23 -0800 Subject: [PATCH 345/632] Script to regenerate test fixtures (#872) Upgrading Pyre requires updating test fixtures with any upstream changes to Pyre's query results for the `simple_class.py` fixture. This adds a new `scripts/` directory to the repo, with a script to regenerate test fixtures. The script regenerates the cache data fixture, and updates the `TypeInferenceProvider` tests to use `assertDictEqual` and helpful error messages for better behavior in future mismatches. This also includes a slight bump to Pyre 0.9.10 to fix install issues on Apple Silicon M1 Macs, and regenerated fixtures using the script above. --- .github/workflows/ci.yml | 2 +- libcst/matchers/_visitors.py | 8 - .../tests/test_type_inference_provider.py | 10 +- libcst/tests/pyre/simple_class.json | 468 +++++++++--------- libcst/tests/test_pyre_integration.py | 35 +- requirements-dev.txt | 2 +- scripts/regenerate-fixtures.py | 42 ++ 7 files changed, 291 insertions(+), 276 deletions(-) create mode 100644 scripts/regenerate-fixtures.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 929d8d90..b1a0b2dd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -92,7 +92,7 @@ jobs: run: pip install -e . 
- run: pyre --version - run: pyre -n check - - run: python libcst/tests/test_pyre_integration.py + - run: python scripts/regenerate-fixtures.py - run: git diff --exit-code # Upload test coverage diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index a314fc4d..a491ffd1 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -323,10 +323,6 @@ def _gather_constructed_visit_funcs( _assert_not_concrete("visit", func) for matcher in matchers: casted_matcher = cast(BaseMatcherNode, matcher) - # pyre-fixme[6]: Expected - # `Sequence[typing.Callable[[cst._nodes.base.CSTNode], None]]` for 2nd - # param but got `Tuple[*Tuple[(CSTNode) -> None, ...], (CSTNode) -> - # None]`. constructed_visitors[casted_matcher] = ( *constructed_visitors.get(casted_matcher, ()), func, @@ -362,10 +358,6 @@ def _gather_constructed_leave_funcs( _assert_not_concrete("leave", func) for matcher in matchers: casted_matcher = cast(BaseMatcherNode, matcher) - # pyre-fixme[6]: Expected - # `Sequence[typing.Callable[[cst._nodes.base.CSTNode], None]]` for 2nd - # param but got `Tuple[*Tuple[(CSTNode) -> None, ...], (CSTNode) -> - # None]`. constructed_visitors[casted_matcher] = ( *constructed_visitors.get(casted_matcher, ()), func, diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index c52a7c8e..50ca3458 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -9,6 +9,7 @@ import os import subprocess import sys from pathlib import Path +from typing import cast, Mapping, Optional from unittest import skipIf import libcst as cst @@ -57,6 +58,8 @@ def _test_simple_class_helper(test: UnitTest, wrapper: MetadataWrapper) -> None: ) @skipIf(sys.platform == "win32", "TypeInferenceProvider doesn't support windows") class TypeInferenceProviderTest(UnitTest): + maxDiff: Optional[int] = None + @classmethod def setUpClass(cls) -> None: os.chdir(TEST_SUITE_PATH) @@ -79,8 +82,13 @@ class TypeInferenceProviderTest(UnitTest): cache = TypeInferenceProvider.gen_cache( root_path=source_path.parent, paths=[source_path.name], timeout=None ) + result = cast(Mapping[str, object], cache[source_path.name]) data: PyreData = json.loads(data_path.read_text()) - self.assertEqual(data, cache[source_path.name]) + self.assertDictEqual( + data, + result, + "Pyre query result mismatch, try running `scripts/regenerate-fixtures.py`?", + ) @data_provider( ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index 288bb567..878ed5eb 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -1,511 +1,511 @@ { "types": [ { + "annotation": "typing.Type[typing.Sequence]", "location": { "start": { - "line": 7, - "column": 19 + "column": 19, + "line": 7 }, "stop": { - "line": 7, - "column": 27 + "column": 27, + "line": 7 } - }, - "annotation": "typing.Type[typing.Sequence]" + } }, { + "annotation": "typing.Type[simple_class.Item]", "location": { "start": { - "line": 10, - "column": 6 + "column": 6, + "line": 10 }, "stop": { - "line": 10, - "column": 10 + "column": 10, + "line": 10 } - }, - "annotation": "typing.Type[simple_class.Item]" + } }, { + "annotation": "typing.Callable(simple_class.Item.__init__)[[Named(self, simple_class.Item), Named(n, int)], None]", "location": { "start": { - "line": 11, - "column": 8 + "column": 8, + "line": 
11 }, "stop": { - "line": 11, - "column": 16 + "column": 16, + "line": 11 } - }, - "annotation": "typing.Callable(simple_class.Item.__init__)[[Named(self, simple_class.Item), Named(n, int)], None]" + } }, { + "annotation": "simple_class.Item", "location": { "start": { - "line": 11, - "column": 17 + "column": 17, + "line": 11 }, "stop": { - "line": 11, - "column": 21 + "column": 21, + "line": 11 } - }, - "annotation": "simple_class.Item" + } }, { + "annotation": "int", "location": { "start": { - "line": 11, - "column": 23 + "column": 23, + "line": 11 }, "stop": { - "line": 11, - "column": 29 + "column": 24, + "line": 11 } - }, - "annotation": "int" + } }, { + "annotation": "typing.Type[int]", "location": { "start": { - "line": 11, - "column": 26 + "column": 26, + "line": 11 }, "stop": { - "line": 11, - "column": 29 + "column": 29, + "line": 11 } - }, - "annotation": "typing.Type[int]" + } }, { + "annotation": "None", "location": { "start": { - "line": 11, - "column": 34 + "column": 34, + "line": 11 }, "stop": { - "line": 11, - "column": 38 + "column": 38, + "line": 11 } - }, - "annotation": "None" + } }, { + "annotation": "simple_class.Item", "location": { "start": { - "line": 12, - "column": 8 + "column": 8, + "line": 12 }, "stop": { - "line": 12, - "column": 12 + "column": 12, + "line": 12 } - }, - "annotation": "simple_class.Item" + } }, { + "annotation": "int", "location": { "start": { - "line": 12, - "column": 8 + "column": 8, + "line": 12 }, "stop": { - "line": 12, - "column": 19 + "column": 19, + "line": 12 } - }, - "annotation": "int" + } }, { + "annotation": "typing.Type[int]", "location": { "start": { - "line": 12, - "column": 21 + "column": 21, + "line": 12 }, "stop": { - "line": 12, - "column": 24 + "column": 24, + "line": 12 } - }, - "annotation": "typing.Type[int]" + } }, { + "annotation": "int", "location": { "start": { - "line": 12, - "column": 27 + "column": 27, + "line": 12 }, "stop": { - "line": 12, - "column": 28 + "column": 28, + "line": 12 } - }, - "annotation": "int" + } }, { + "annotation": "typing.Type[simple_class.ItemCollector]", "location": { "start": { - "line": 15, - "column": 6 + "column": 6, + "line": 15 }, "stop": { - "line": 15, - "column": 19 + "column": 19, + "line": 15 } - }, - "annotation": "typing.Type[simple_class.ItemCollector]" + } }, { + "annotation": "typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]]", "location": { "start": { - "line": 16, - "column": 8 + "column": 8, + "line": 16 }, "stop": { - "line": 16, - "column": 17 + "column": 17, + "line": 16 } - }, - "annotation": "typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]]" + } }, { + "annotation": "simple_class.ItemCollector", "location": { "start": { - "line": 16, - "column": 18 + "column": 18, + "line": 16 }, "stop": { - "line": 16, - "column": 22 + "column": 22, + "line": 16 } - }, - "annotation": "simple_class.ItemCollector" + } }, { + "annotation": "int", "location": { "start": { - "line": 16, - "column": 24 + "column": 24, + "line": 16 }, "stop": { - "line": 16, - "column": 30 + "column": 25, + "line": 16 } - }, - "annotation": "int" + } }, { + "annotation": "typing.Type[int]", "location": { "start": { - "line": 16, - "column": 27 + "column": 27, + "line": 16 }, "stop": { - "line": 16, - "column": 30 + "column": 30, + "line": 16 } - }, - "annotation": "typing.Type[int]" + } }, { + "annotation": 
"BoundMethod[typing.Callable(typing.GenericMeta.__getitem__)[[Named(self, unknown), typing.Type[Variable[typing._T_co](covariant)]], typing.Type[typing.Sequence[Variable[typing._T_co](covariant)]]], typing.Type[typing.Sequence]]", "location": { "start": { - "line": 16, - "column": 35 + "column": 35, + "line": 16 }, "stop": { - "line": 16, - "column": 43 + "column": 43, + "line": 16 } - }, - "annotation": "BoundMethod[typing.Callable(typing.GenericMeta.__getitem__)[[Named(self, unknown), typing.Type[Variable[typing._T_co](covariant)]], typing.Type[typing.Sequence[Variable[typing._T_co](covariant)]]], typing.Type[typing.Sequence]]" + } }, { + "annotation": "typing.Type[typing.Sequence[simple_class.Item]]", "location": { "start": { - "line": 16, - "column": 35 + "column": 35, + "line": 16 }, "stop": { - "line": 16, - "column": 49 + "column": 49, + "line": 16 } - }, - "annotation": "typing.Type[typing.Sequence[simple_class.Item]]" + } }, { + "annotation": "typing.Type[simple_class.Item]", "location": { "start": { - "line": 16, - "column": 44 + "column": 44, + "line": 16 }, "stop": { - "line": 16, - "column": 48 + "column": 48, + "line": 16 } - }, - "annotation": "typing.Type[simple_class.Item]" + } }, { + "annotation": "typing.List[simple_class.Item]", "location": { "start": { - "line": 17, - "column": 15 + "column": 15, + "line": 17 }, "stop": { - "line": 17, - "column": 42 + "column": 42, + "line": 17 } - }, - "annotation": "typing.List[simple_class.Item]" + } }, { + "annotation": "typing.Type[simple_class.Item]", "location": { "start": { - "line": 17, - "column": 16 + "column": 16, + "line": 17 }, "stop": { - "line": 17, - "column": 20 + "column": 20, + "line": 17 } - }, - "annotation": "typing.Type[simple_class.Item]" + } }, { + "annotation": "simple_class.Item", "location": { "start": { - "line": 17, - "column": 16 + "column": 16, + "line": 17 }, "stop": { - "line": 17, - "column": 23 + "column": 23, + "line": 17 } - }, - "annotation": "simple_class.Item" + } }, { + "annotation": "int", "location": { "start": { - "line": 17, - "column": 28 + "column": 28, + "line": 17 }, "stop": { - "line": 17, - "column": 29 + "column": 29, + "line": 17 } - }, - "annotation": "int" + } }, { + "annotation": "typing.Type[range]", "location": { "start": { - "line": 17, - "column": 33 + "column": 33, + "line": 17 }, "stop": { - "line": 17, - "column": 38 + "column": 38, + "line": 17 } - }, - "annotation": "typing.Type[range]" + } }, { + "annotation": "range", "location": { "start": { - "line": 17, - "column": 33 + "column": 33, + "line": 17 }, "stop": { - "line": 17, - "column": 41 + "column": 41, + "line": 17 } - }, - "annotation": "range" + } }, { + "annotation": "int", "location": { "start": { - "line": 17, - "column": 39 + "column": 39, + "line": 17 }, "stop": { - "line": 17, - "column": 40 + "column": 40, + "line": 17 } - }, - "annotation": "int" + } }, { + "annotation": "simple_class.ItemCollector", "location": { "start": { - "line": 20, - "column": 0 + "column": 0, + "line": 20 }, "stop": { - "line": 20, - "column": 9 + "column": 9, + "line": 20 } - }, - "annotation": "simple_class.ItemCollector" + } }, { + "annotation": "typing.Type[simple_class.ItemCollector]", "location": { "start": { - "line": 20, - "column": 12 + "column": 12, + "line": 20 }, "stop": { - "line": 20, - "column": 25 + "column": 25, + "line": 20 } - }, - "annotation": "typing.Type[simple_class.ItemCollector]" + } }, { + "annotation": "simple_class.ItemCollector", "location": { "start": { - "line": 20, - "column": 12 + "column": 12, 
+ "line": 20 }, "stop": { - "line": 20, - "column": 27 + "column": 27, + "line": 20 } - }, - "annotation": "simple_class.ItemCollector" + } }, { + "annotation": "typing.Sequence[simple_class.Item]", "location": { "start": { - "line": 21, - "column": 0 + "column": 0, + "line": 21 }, "stop": { - "line": 21, - "column": 5 + "column": 5, + "line": 21 } - }, - "annotation": "typing.Sequence[simple_class.Item]" + } }, { + "annotation": "typing.Type[typing.Sequence[simple_class.Item]]", "location": { "start": { - "line": 21, - "column": 7 + "column": 7, + "line": 21 }, "stop": { - "line": 21, - "column": 21 + "column": 21, + "line": 21 } - }, - "annotation": "typing.Type[typing.Sequence[simple_class.Item]]" + } }, { + "annotation": "simple_class.ItemCollector", "location": { "start": { - "line": 21, - "column": 24 + "column": 24, + "line": 21 }, "stop": { - "line": 21, - "column": 33 + "column": 33, + "line": 21 } - }, - "annotation": "simple_class.ItemCollector" + } }, { + "annotation": "BoundMethod[typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]], simple_class.ItemCollector]", "location": { "start": { - "line": 21, - "column": 24 + "column": 24, + "line": 21 }, "stop": { - "line": 21, - "column": 43 + "column": 43, + "line": 21 } - }, - "annotation": "BoundMethod[typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]], simple_class.ItemCollector]" + } }, { + "annotation": "typing.Sequence[simple_class.Item]", "location": { "start": { - "line": 21, - "column": 24 + "column": 24, + "line": 21 }, "stop": { - "line": 21, - "column": 46 + "column": 46, + "line": 21 } - }, - "annotation": "typing.Sequence[simple_class.Item]" + } }, { + "annotation": "typing_extensions.Literal[3]", "location": { "start": { - "line": 21, - "column": 44 + "column": 44, + "line": 21 }, "stop": { - "line": 21, - "column": 45 + "column": 45, + "line": 21 } - }, - "annotation": "typing_extensions.Literal[3]" + } }, { + "annotation": "simple_class.Item", "location": { "start": { - "line": 22, - "column": 4 + "column": 4, + "line": 22 }, "stop": { - "line": 22, - "column": 8 + "column": 8, + "line": 22 } - }, - "annotation": "simple_class.Item" + } }, { + "annotation": "typing.Sequence[simple_class.Item]", "location": { "start": { - "line": 22, - "column": 12 + "column": 12, + "line": 22 }, "stop": { - "line": 22, - "column": 17 + "column": 17, + "line": 22 } - }, - "annotation": "typing.Sequence[simple_class.Item]" + } }, { + "annotation": "simple_class.Item", "location": { "start": { - "line": 23, - "column": 4 + "column": 4, + "line": 23 }, "stop": { - "line": 23, - "column": 8 + "column": 8, + "line": 23 } - }, - "annotation": "simple_class.Item" + } }, { + "annotation": "int", "location": { "start": { - "line": 23, - "column": 4 + "column": 4, + "line": 23 }, "stop": { - "line": 23, - "column": 15 + "column": 15, + "line": 23 } - }, - "annotation": "int" + } } ] } \ No newline at end of file diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 021385af..679b2d5e 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -5,17 +5,12 @@ import json -import os from pathlib import Path from typing import Dict, List, Mapping, Optional, Tuple, Union import libcst as cst from libcst.metadata import MetadataWrapper, PositionProvider -from 
libcst.metadata.type_inference_provider import ( - _process_pyre_data, - PyreData, - run_command, -) +from libcst.metadata.type_inference_provider import PyreData from libcst.testing.utils import data_provider, UnitTest TEST_SUITE_PATH: Path = Path(__file__).parent / "pyre" @@ -117,29 +112,7 @@ class PyreIntegrationTest(UnitTest): if __name__ == "__main__": - """Run this script directly to generate pyre data for test suite (tests/pyre/*.py)""" - print("start pyre server") - stdout: str - stderr: str - return_code: int - os.chdir(TEST_SUITE_PATH) - stdout, stderr, return_code = run_command(["pyre", "start", "--no-watchman"]) - if return_code != 0: - print(stdout) - print(stderr) + import sys - for path in TEST_SUITE_PATH.glob("*.py"): - # Pull params into it's own arg to avoid the string escaping in subprocess - params = f"path='{path}'" - cmd = ["pyre", "query", f"types({params})"] - print(cmd) - stdout, stderr, return_code = run_command(cmd) - if return_code != 0: - print(stdout) - print(stderr) - data = json.loads(stdout) - data = data["response"][0] - data = _process_pyre_data(data) - output_path = path.with_suffix(".json") - print(f"write output to {output_path}") - output_path.write_text(json.dumps(data, indent=2)) + print("run `scripts/regenerate-fixtures.py` instead") + sys.exit(1) diff --git a/requirements-dev.txt b/requirements-dev.txt index d34c666a..8155a99a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -9,7 +9,7 @@ jupyter>=1.0.0 maturin>=0.8.3,<0.14 nbsphinx>=0.4.2 prompt-toolkit>=2.0.9 -pyre-check==0.9.9; platform_system != "Windows" +pyre-check==0.9.10; platform_system != "Windows" setuptools_scm>=6.0.1 sphinx-rtd-theme>=0.4.3 ufmt==2.0.1 diff --git a/scripts/regenerate-fixtures.py b/scripts/regenerate-fixtures.py new file mode 100644 index 00000000..2b67b304 --- /dev/null +++ b/scripts/regenerate-fixtures.py @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +""" +Regenerate test fixtures, eg. after upgrading Pyre +""" + +import json +import os +from pathlib import Path +from subprocess import run + +from libcst.metadata import TypeInferenceProvider + + +def main() -> None: + CWD = Path.cwd() + repo_root = Path(__file__).parent.parent + test_root = repo_root / "libcst" / "tests" / "pyre" + + try: + os.chdir(test_root) + run(["pyre", "-n", "start", "--no-watchman"], check=True) + + for file_path in test_root.glob("*.py"): + json_path = file_path.with_suffix(".json") + print(f"generating {file_path} -> {json_path}") + + path_str = file_path.as_posix() + cache = TypeInferenceProvider.gen_cache(test_root, [path_str], timeout=None) + result = cache[path_str] + json_path.write_text(json.dumps(result, sort_keys=True, indent=2)) + + finally: + run(["pyre", "-n", "stop"], check=True) + os.chdir(CWD) + + +if __name__ == "__main__": + main() From f9536b522f58d2b70ae8beb6b607b45ef08620e6 Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Wed, 22 Feb 2023 14:36:10 -0800 Subject: [PATCH 346/632] Pass root path to FullyQualifiedNameProvider (#867) This allows FullyQualifiedNameProvider to work with absolute paths, rather than assuming all paths given will be relative to the current directory. This enables tools like Fixit to provide a root path, and have the FullyQualifiedNameProvider correctly scope the final results relative to that root path. 
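A minimal sketch of the intended usage, mirroring the test added in this patch (the repository layout below is hypothetical):

```python
from pathlib import Path

from libcst.metadata import FullRepoManager, FullyQualifiedNameProvider

# Hypothetical checkout: /repo/pkg/mod.py exists on disk.
root = Path("/repo")
file_path = (root / "pkg" / "mod.py").as_posix()

# Root and file paths are both absolute here; the provider now scopes the
# resulting qualified names (e.g. "pkg.mod") relative to `root` instead of
# the current working directory.
manager = FullRepoManager(root, [file_path], [FullyQualifiedNameProvider])
wrapper = manager.get_metadata_wrapper_for_path(file_path)
fqnames = wrapper.resolve(FullyQualifiedNameProvider)
```
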
This does require that both the root path and the given file paths match the other as relative or absolute, due to the `calculate_module_and_package` helper comparing file paths relative to the root path, but this seems like a reasonable tradeoff, and unlikely to cause a problem in normal use cases. --- libcst/_types.py | 4 +++- libcst/helpers/module.py | 5 ++++- libcst/metadata/full_repo_manager.py | 3 ++- libcst/metadata/name_provider.py | 2 +- libcst/metadata/tests/test_name_provider.py | 13 ++++++++----- 5 files changed, 18 insertions(+), 9 deletions(-) diff --git a/libcst/_types.py b/libcst/_types.py index 8df90ee5..24055a5c 100644 --- a/libcst/_types.py +++ b/libcst/_types.py @@ -4,7 +4,8 @@ # LICENSE file in the root directory of this source tree. -from typing import TYPE_CHECKING, TypeVar +from pathlib import PurePath +from typing import TYPE_CHECKING, TypeVar, Union if TYPE_CHECKING: from libcst._nodes.base import CSTNode # noqa: F401 @@ -12,3 +13,4 @@ if TYPE_CHECKING: CSTNodeT = TypeVar("CSTNodeT", bound="CSTNode") CSTNodeT_co = TypeVar("CSTNodeT_co", bound="CSTNode", covariant=True) +StrPath = Union[str, PurePath] diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index f9961807..2ff5ef00 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -9,6 +9,7 @@ from pathlib import PurePath from typing import List, Optional from libcst import Comment, EmptyLine, ImportFrom, Module +from libcst._types import StrPath from libcst.helpers.expression import get_full_name_for_node @@ -130,7 +131,9 @@ class ModuleNameAndPackage: package: str -def calculate_module_and_package(repo_root: str, filename: str) -> ModuleNameAndPackage: +def calculate_module_and_package( + repo_root: StrPath, filename: StrPath +) -> ModuleNameAndPackage: # Given an absolute repo_root and an absolute filename, calculate the # python module name for the file. 
relative_filename = PurePath(filename).relative_to(repo_root) diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index 6a7c1e9a..83bb6e83 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -8,6 +8,7 @@ from pathlib import Path from typing import Collection, Dict, List, Mapping, TYPE_CHECKING import libcst as cst +from libcst._types import StrPath from libcst.metadata.wrapper import MetadataWrapper if TYPE_CHECKING: @@ -17,7 +18,7 @@ if TYPE_CHECKING: class FullRepoManager: def __init__( self, - repo_root_dir: str, + repo_root_dir: StrPath, paths: Collection[str], providers: Collection["ProviderT"], timeout: int = 5, diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 60d8763e..1868fa66 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -114,7 +114,7 @@ class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedN def gen_cache( cls, root_path: Path, paths: List[str], timeout: Optional[int] = None ) -> Mapping[str, ModuleNameAndPackage]: - cache = {path: calculate_module_and_package(".", path) for path in paths} + cache = {path: calculate_module_and_package(root_path, path) for path in paths} return cache def __init__(self, cache: ModuleNameAndPackage) -> None: diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index 9f381368..c1ba59bc 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -560,11 +560,14 @@ class FullyQualifiedNameProviderTest(UnitTest): class FullyQualifiedNameIntegrationTest(UnitTest): def test_with_full_repo_manager(self) -> None: with TemporaryDirectory() as dir: - fname = "pkg/mod.py" - (Path(dir) / "pkg").mkdir() - (Path(dir) / fname).touch() - mgr = FullRepoManager(dir, [fname], [FullyQualifiedNameProvider]) - wrapper = mgr.get_metadata_wrapper_for_path(fname) + root = Path(dir) + file_path = root / "pkg/mod.py" + file_path.parent.mkdir() + file_path.touch() + + file_path_str = file_path.as_posix() + mgr = FullRepoManager(root, [file_path_str], [FullyQualifiedNameProvider]) + wrapper = mgr.get_metadata_wrapper_for_path(file_path_str) fqnames = wrapper.resolve(FullyQualifiedNameProvider) (mod, names) = next(iter(fqnames.items())) self.assertIsInstance(mod, cst.Module) From 5ccba6b0d3d9bc54b14703a98dbfab1ee222cc43 Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Wed, 8 Mar 2023 03:14:43 -0800 Subject: [PATCH 347/632] Use new setup-python caching actions (#874) With the latest setup-python actions, there is a better caching mechanism available that also requires less setup, and provides better fallback behavior that should help avoid the random CI failures that have been happening on 3.11 for setuptools-rust. This ensures that we install the necessary dependencies before attempting to build the package or run tests, while still enabling speedups in best case scenario when requirements files haven't changed. 
See the upstream readme for details: https://github.com/actions/setup-python#caching-packages-dependencies --- .github/workflows/build.yml | 8 ++---- .github/workflows/ci.yml | 42 +++++++++---------------------- .github/workflows/pypi_upload.yml | 8 ++---- 3 files changed, 16 insertions(+), 42 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5f2457ae..d7f8d262 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -43,13 +43,9 @@ jobs: - uses: actions/setup-python@v4 if: ${{ !contains(matrix.os, 'self-hosted') }} with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - - uses: actions/cache@v3 - id: cache - if: ${{ !contains(matrix.os, 'self-hosted') }} - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Rust Cache if: ${{ !contains(matrix.os, 'self-hosted') }} uses: Swatinem/rust-cache@v1.3.0 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b1a0b2dd..fddf58c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,14 +24,10 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: ${{ matrix.python-version }} - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - if: ${{ matrix.parser == 'native' }} @@ -55,14 +51,10 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - run: flake8 @@ -78,14 +70,10 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Make sure Pyre uses the working copy @@ -102,14 +90,10 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Generate Coverage @@ -134,14 +118,10 @@ jobs: - uses: actions/checkout@v3 - uses: 
actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - uses: ts-graphviz/setup-graphviz@v1 @@ -172,6 +152,8 @@ jobs: working-directory: native - uses: actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - name: test uses: actions-rs/cargo@v1 diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 963e04eb..0d7ae3c2 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -26,14 +26,10 @@ jobs: path: wheelhouse - uses: actions/setup-python@v4 with: + cache: pip + cache-dependency-path: "**/requirements*.txt" python-version: "3.10" - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' run: | pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Disable scmtools local scheme From 577e5d5cd4ff96d7d1787680db9e0f541d6f9528 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 9 Mar 2023 09:15:36 +0000 Subject: [PATCH 348/632] [ci] Fix pypi_upload workflow (#889) pypi_upload has been broken since #810, because `actions/checkout` defaults to a shallow checkout that only checks out the revision triggering the workflow. This causes setuptools-scm to miss the most recent tag, causing the version to be detected as `0.1`. --- .github/workflows/build.yml | 2 ++ .github/workflows/pypi_upload.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d7f8d262..a6e62511 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -40,6 +40,8 @@ jobs: CIBW_BUILD_VERBOSITY: 1 steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - uses: actions/setup-python@v4 if: ${{ !contains(matrix.os, 'self-hosted') }} with: diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 0d7ae3c2..d8ae1d4a 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -18,6 +18,8 @@ jobs: needs: build steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - name: Download binary wheels id: download uses: actions/download-artifact@v3 From 71183c65d7937624a4220bc075b0bf393af4b444 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 08:27:21 +0000 Subject: [PATCH 349/632] Bump pypa/cibuildwheel from 2.12.0 to 2.12.1 (#891) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.12.0 to 2.12.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.12.0...v2.12.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a6e62511..e1430540 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.12.0 + uses: pypa/cibuildwheel@v2.12.1 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From c876db6d2d7c8e2063352eedeecdb54b12e61781 Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Mon, 13 Mar 2023 18:02:04 -0700 Subject: [PATCH 350/632] Add new FilePathProvider Caches file path information on the root `Module` node. Resolves paths when caching, so they are always absolute paths. Adds a new `chdir` helper to change working directory and automatically revert to previous directory, which makes testing file paths with the `"."` repo root easier. ghstack-source-id: 3413905fc1f719beba8db68895c4fbb7156660d4 Pull Request resolved: https://github.com/Instagram/LibCST/pull/892 --- docs/source/metadata.rst | 8 + libcst/helpers/paths.py | 25 +++ libcst/helpers/tests/test_paths.py | 58 +++++++ libcst/metadata/__init__.py | 2 + libcst/metadata/file_path_provider.py | 55 +++++++ .../metadata/tests/test_file_path_provider.py | 145 ++++++++++++++++++ 6 files changed, 293 insertions(+) create mode 100644 libcst/helpers/paths.py create mode 100644 libcst/helpers/tests/test_paths.py create mode 100644 libcst/metadata/file_path_provider.py create mode 100644 libcst/metadata/tests/test_file_path_provider.py diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index 172b1b51..e568031e 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -226,6 +226,14 @@ We provide :class:`~libcst.metadata.ParentNodeProvider` for those use cases. .. autoclass:: libcst.metadata.ParentNodeProvider :no-undoc-members: +File Path Metadata +------------------ +This provides the absolute file path on disk for any module being visited. +Requires an active :class:`~libcst.metadata.FullRepoManager` when using this provider. + +.. autoclass:: libcst.metadata.FilePathProvider + :no-undoc-members: + Type Inference Metadata ----------------------- `Type inference `__ is to automatically infer diff --git a/libcst/helpers/paths.py b/libcst/helpers/paths.py new file mode 100644 index 00000000..00830ce9 --- /dev/null +++ b/libcst/helpers/paths.py @@ -0,0 +1,25 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import os +from contextlib import contextmanager +from pathlib import Path +from typing import Generator + +from libcst._types import StrPath + + +@contextmanager +def chdir(path: StrPath) -> Generator[Path, None, None]: + """ + Temporarily chdir to the given path, and then return to the previous path. + """ + try: + path = Path(path).resolve() + cwd = os.getcwd() + os.chdir(path) + yield path + finally: + os.chdir(cwd) diff --git a/libcst/helpers/tests/test_paths.py b/libcst/helpers/tests/test_paths.py new file mode 100644 index 00000000..c36273d4 --- /dev/null +++ b/libcst/helpers/tests/test_paths.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from pathlib import Path +from tempfile import TemporaryDirectory + +from libcst.helpers.paths import chdir +from libcst.testing.utils import UnitTest + + +class PathsTest(UnitTest): + def test_chdir(self) -> None: + with TemporaryDirectory() as td: + tdp = Path(td).resolve() + inner = tdp / "foo" / "bar" + inner.mkdir(parents=True) + + with self.subTest("string paths"): + cwd1 = Path.cwd() + + with chdir(tdp.as_posix()) as path2: + cwd2 = Path.cwd() + self.assertEqual(tdp, cwd2) + self.assertEqual(tdp, path2) + + with chdir(inner.as_posix()) as path3: + cwd3 = Path.cwd() + self.assertEqual(inner, cwd3) + self.assertEqual(inner, path3) + + cwd4 = Path.cwd() + self.assertEqual(tdp, cwd4) + self.assertEqual(cwd2, cwd4) + + cwd5 = Path.cwd() + self.assertEqual(cwd1, cwd5) + + with self.subTest("pathlib objects"): + cwd1 = Path.cwd() + + with chdir(tdp) as path2: + cwd2 = Path.cwd() + self.assertEqual(tdp, cwd2) + self.assertEqual(tdp, path2) + + with chdir(inner) as path3: + cwd3 = Path.cwd() + self.assertEqual(inner, cwd3) + self.assertEqual(inner, path3) + + cwd4 = Path.cwd() + self.assertEqual(tdp, cwd4) + self.assertEqual(cwd2, cwd4) + + cwd5 = Path.cwd() + self.assertEqual(cwd1, cwd5) diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index 66e7e525..ecc42741 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -16,6 +16,7 @@ from libcst.metadata.expression_context_provider import ( ExpressionContext, ExpressionContextProvider, ) +from libcst.metadata.file_path_provider import FilePathProvider from libcst.metadata.full_repo_manager import FullRepoManager from libcst.metadata.name_provider import ( FullyQualifiedNameProvider, @@ -88,6 +89,7 @@ __all__ = [ "TypeInferenceProvider", "FullRepoManager", "AccessorProvider", + "FilePathProvider", # Experimental APIs: "ExperimentalReentrantCodegenProvider", "CodegenPartial", diff --git a/libcst/metadata/file_path_provider.py b/libcst/metadata/file_path_provider.py new file mode 100644 index 00000000..681d3793 --- /dev/null +++ b/libcst/metadata/file_path_provider.py @@ -0,0 +1,55 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from pathlib import Path +from typing import Collection, List, Mapping, Optional + +import libcst as cst +from libcst.metadata.base_provider import BatchableMetadataProvider + + +class FilePathProvider(BatchableMetadataProvider[Collection[Path]]): + """ + Provides the path to the current file on disk as metadata for the root + :class:`~libcst.Module` node. Requires a :class:`~libcst.metadata.FullRepoManager`. + The returned path will always be resolved to an absolute path using + :func:`pathlib.Path.resolve`. + + Example usage: + + .. code:: python + + class CustomVisitor(CSTVisitor): + METADATA_DEPENDENCIES = [FilePathProvider] + + path: pathlib.Path + + def visit_Module(self, node: libcst.Module) -> None: + self.path = self.get_metadata(FilePathProvider, node) + + .. 
code:: + + >>> mgr = FullRepoManager(".", {"libcst/_types.py"}, {FilePathProvider}) + >>> wrapper = mgr.get_metadata_wrapper_for_path("libcst/_types.py") + >>> fqnames = wrapper.resolve(FilePathProvider) + >>> {type(k): v for k, v in wrapper.resolve(FilePathProvider).items()} + {: PosixPath('/home/user/libcst/_types.py')} + + """ + + @classmethod + def gen_cache( + cls, root_path: Path, paths: List[str], timeout: Optional[int] = None + ) -> Mapping[str, Path]: + cache = {path: (root_path / path).resolve() for path in paths} + return cache + + def __init__(self, cache: Path) -> None: + super().__init__(cache) + self.path: Path = cache + + def visit_Module(self, node: cst.Module) -> Optional[bool]: + self.set_metadata(node, self.path) + return False diff --git a/libcst/metadata/tests/test_file_path_provider.py b/libcst/metadata/tests/test_file_path_provider.py new file mode 100644 index 00000000..2b0631f5 --- /dev/null +++ b/libcst/metadata/tests/test_file_path_provider.py @@ -0,0 +1,145 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import Set + +import libcst +from libcst._visitors import CSTVisitor +from libcst.helpers.paths import chdir +from libcst.metadata import FilePathProvider, FullRepoManager, MetadataWrapper +from libcst.testing.utils import UnitTest + + +class FilePathProviderTest(UnitTest): + def setUp(self) -> None: + self.td = TemporaryDirectory() + self.tdp = Path(self.td.name).resolve() + self.addCleanup(self.td.cleanup) + + def test_provider_cache(self) -> None: + pkg = self.tdp / "pkg" + pkg.mkdir() + files = [Path(pkg / name) for name in ("file1.py", "file2.py", "file3.py")] + [file.write_text("print('hello')\n") for file in files] + + with self.subTest("absolute paths"): + repo_manager = FullRepoManager( + self.tdp, [f.as_posix() for f in files], {FilePathProvider} + ) + repo_manager.resolve_cache() + + expected = { + FilePathProvider: {f.as_posix(): f for f in files}, + } + self.assertDictEqual(expected, repo_manager.cache) + + with self.subTest("repo relative paths"): + repo_manager = FullRepoManager( + self.tdp, + [f.relative_to(self.tdp).as_posix() for f in files], + {FilePathProvider}, + ) + repo_manager.resolve_cache() + + expected = { + FilePathProvider: { + f.relative_to(self.tdp).as_posix(): f for f in files + }, + } + self.assertDictEqual(expected, repo_manager.cache) + + with self.subTest("dot relative paths"): + with chdir(self.tdp): + repo_manager = FullRepoManager( + ".", + [f.relative_to(self.tdp).as_posix() for f in files], + {FilePathProvider}, + ) + repo_manager.resolve_cache() + + expected = { + FilePathProvider: { + f.relative_to(self.tdp).as_posix(): f for f in files + }, + } + self.assertDictEqual(expected, repo_manager.cache) + + def test_visitor(self) -> None: + pkg = self.tdp / "pkg" + pkg.mkdir() + files = [Path(pkg / name) for name in ("file1.py", "file2.py", "file3.py")] + [file.write_text("print('hello')\n") for file in files] + + seen: Set[Path] = set() + + class FakeVisitor(CSTVisitor): + METADATA_DEPENDENCIES = [FilePathProvider] + + def visit_Module(self, node: libcst.Module) -> None: + seen.add(self.get_metadata(FilePathProvider, node)) + + with self.subTest("absolute paths"): + seen.clear() + repo_manager = FullRepoManager( + self.tdp, [f.as_posix() for f in files], {FilePathProvider} + ) + repo_manager.resolve_cache() 
+ + for file in files: + module = libcst.parse_module(file.read_bytes()) + wrapper = MetadataWrapper( + module, cache=repo_manager.get_cache_for_path(file.as_posix()) + ) + wrapper.visit(FakeVisitor()) + + expected = set(files) + self.assertSetEqual(expected, seen) + + with self.subTest("repo relative paths"): + seen.clear() + repo_manager = FullRepoManager( + self.tdp, + [f.relative_to(self.tdp).as_posix() for f in files], + {FilePathProvider}, + ) + repo_manager.resolve_cache() + + for file in files: + module = libcst.parse_module(file.read_bytes()) + wrapper = MetadataWrapper( + module, + cache=repo_manager.get_cache_for_path( + file.relative_to(self.tdp).as_posix() + ), + ) + wrapper.visit(FakeVisitor()) + + expected = set(files) + self.assertSetEqual(expected, seen) + + with self.subTest("dot relative paths"): + with chdir(self.tdp): + seen.clear() + repo_manager = FullRepoManager( + ".", + [f.relative_to(self.tdp).as_posix() for f in files], + {FilePathProvider}, + ) + repo_manager.resolve_cache() + + for file in files: + module = libcst.parse_module(file.read_bytes()) + wrapper = MetadataWrapper( + module, + cache=repo_manager.get_cache_for_path( + file.relative_to(self.tdp).as_posix() + ), + ) + wrapper.visit(FakeVisitor()) + + expected = set(files) + self.assertSetEqual(expected, seen) From 497f7784c5a4405bf9833c1f78087713154d8627 Mon Sep 17 00:00:00 2001 From: Mikhail Podtserkovskiy Date: Tue, 14 Mar 2023 22:07:40 +0000 Subject: [PATCH 351/632] Fix: relative imports from '' package are not allowed (#894) --- libcst/helpers/module.py | 2 +- libcst/helpers/tests/test_module.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 2ff5ef00..3c26122d 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -90,7 +90,7 @@ def get_absolute_module_from_package( if num_dots == 0: # This is an absolute import, so the module is correct. return module_name - if current_package is None: + if current_package is None or current_package == "": # We don't actually have the current module available, so we can't compute # the absolute module from relative. return None diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index ea8bf43f..7260f5cc 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -115,6 +115,9 @@ class ModuleTest(UnitTest): ("x/y/z/__init__.py", "from a.b import c", "a.b"), # Relative import that can't be resolved due to missing module. (None, "from ..w import c", None), + # Attempted relative import with no known parent package + ("__init__.py", "from .y import z", None), + ("x.py", "from .y import z", None), # Relative import that goes past the module level. ("x.py", "from ...y import z", None), ("x/y/z.py", "from ... import c", None), From 6a7b82e2b66a78111ff8fb9be9dea4d2fc84844e Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Tue, 14 Mar 2023 19:12:36 -0700 Subject: [PATCH 352/632] PEP 621 + hatch to run tests/lint/etc Moves PEP 621 metadata from `setup.py` and `requirements*.txt` into the `[project]` table of `pyproject.toml`. This enables using hatch as a task runner for the project, where previously one would need to remember a bunch of different commands, or repeatedly consult the readme's developer guide to find all of the relevant commands. 
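As a sketch, the day-to-day workflow becomes roughly the following (script names per the list that follows, as wired up in the `[tool.hatch.envs.default.scripts]` table added to `pyproject.toml` in this change):

```
pip install -U hatch    # one-time setup
hatch env create        # build libcst.native and install the dev extras
hatch run test          # run the test suite
hatch run lint          # flake8, ufmt check, slotscheck, copyright check
hatch run typecheck     # pyre
hatch run docs          # sphinx-build docs/source into docs/build
```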
This creates the following hatch commands: - docs - fixtures - format - lint - test - typecheck It also updates all of the github actions workflows to use the appropriate hatch commands, and the readme's developer guide, so that there is only one source of truth for what constitutes running tests. The "test" workflows now drop the matrix distinction between "pure" or "native", and run tests in both modes from a single build. ghstack-source-id: 8834da7825436d63365f9e64aee42ac40f76c5f2 Pull Request resolved: https://github.com/Instagram/LibCST/pull/893 --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 87 ++++++++++++------------ .github/workflows/pypi_upload.yml | 14 ++-- .gitignore | 1 + .readthedocs.yml | 4 +- MANIFEST.in | 2 +- README.rst | 60 ++++++++-------- check_copyright.sh | 30 -------- libcst/codemod/tests/test_codemod_cli.py | 4 ++ libcst/tests/__main__.py | 15 ++++ pyproject.toml | 81 ++++++++++++++++++++-- requirements-dev.txt | 19 ------ requirements.txt | 4 -- scripts/check_copyright.py | 58 ++++++++++++++++ setup.py | 40 +---------- 15 files changed, 237 insertions(+), 184 deletions(-) delete mode 100755 check_copyright.sh create mode 100644 libcst/tests/__main__.py delete mode 100644 requirements-dev.txt delete mode 100644 requirements.txt create mode 100644 scripts/check_copyright.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e1430540..c86e54c5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -46,7 +46,7 @@ jobs: if: ${{ !contains(matrix.os, 'self-hosted') }} with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: "3.10" - name: Rust Cache if: ${{ !contains(matrix.os, 'self-hosted') }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fddf58c7..a8dcc1cc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,93 +13,91 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + os: [macos-latest, ubuntu-latest, windows-latest] python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] - parser: [pure, native] exclude: # skip these builds until https://bugs.launchpad.net/lxml/+bug/1977998 is resolved - os: windows-latest python-version: "3.11" steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: ${{ matrix.python-version }} - - name: Install Dependencies + - name: Install hatch run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - if: ${{ matrix.parser == 'native' }} - uses: actions-rs/toolchain@v1 + pip install -U hatch + - uses: actions-rs/toolchain@v1 with: toolchain: stable - - if: ${{ matrix.parser == 'native' }} - name: Rust Cache + - name: Rust Cache uses: Swatinem/rust-cache@v1.3.0 with: working-directory: native - - run: >- - echo LIBCST_PARSER_TYPE=${{ matrix.parser }} >> $GITHUB_ENV - - name: Run Tests - run: python setup.py test + - name: Build LibCST + run: hatch env create + - name: Pure Tests + run: hatch run test + - name: Native Tests + env: + LIBCST_PARSER_TYPE: ${{ matrix.parser }} + run: hatch run test # Run linters lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: 
"pyproject.toml" python-version: "3.10" - - name: Install Dependencies - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - run: flake8 - - run: ufmt check . - - run: python3 -m fixit.cli.run_rules - - run: python -m slotscheck libcst - - run: ./check_copyright.sh + - name: Install hatch + run: pip install -U hatch + - run: hatch run lint + - run: hatch run fixtures # Run pyre typechecker typecheck: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: "3.10" - - name: Install Dependencies - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - name: Make sure Pyre uses the working copy - run: pip install -e . - - run: pyre --version - - run: pyre -n check - - run: python scripts/regenerate-fixtures.py - - run: git diff --exit-code + - name: Install hatch + run: pip install -U hatch + - run: hatch run typecheck # Upload test coverage coverage: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: "3.10" - - name: Install Dependencies - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Install hatch + run: pip install -U hatch - name: Generate Coverage run: | - coverage run setup.py test - coverage xml -i + hatch run coverage run setup.py test + hatch run coverage xml -i - uses: codecov/codecov-action@v3 with: files: coverage.xml @@ -116,16 +114,17 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + with: + fetch-depth: 0 - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: "3.10" - - name: Install Dependencies - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Install hatch + run: pip install -U hatch - uses: ts-graphviz/setup-graphviz@v1 - - run: sphinx-build docs/source/ docs/build/ + - run: hatch run docs - name: Archive Docs uses: actions/upload-artifact@v3 with: @@ -153,7 +152,7 @@ jobs: - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: "3.10" - name: test uses: actions-rs/cargo@v1 diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index d8ae1d4a..b399ef5b 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -29,17 +29,15 @@ jobs: - uses: actions/setup-python@v4 with: cache: pip - cache-dependency-path: "**/requirements*.txt" + cache-dependency-path: "pyproject.toml" python-version: "3.10" - - name: Install Dependencies - run: | - pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - - name: Disable scmtools local scheme - run: >- - echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Install hatch + run: pip install -U hatch - name: Build a source tarball + env: + LIBCST_NO_LOCAL_SCHEME: 1 run: >- - python -m + hatch run python -m build --sdist --outdir ${{ steps.download.outputs.download-path }} diff --git a/.gitignore b/.gitignore index 
faef172f..57628584 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.swp *.swo *.pyc +*.pyd *.pyo *.so *.egg-info/ diff --git a/.readthedocs.yml b/.readthedocs.yml index 7889c2c9..008477b4 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -15,8 +15,8 @@ build: python: install: - - requirements: requirements.txt - - requirements: requirements-dev.txt - method: pip path: . + extra_requirements: + - dev diff --git a/MANIFEST.in b/MANIFEST.in index 0f3912c6..17070b66 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md requirements.txt requirements-dev.txt docs/source/*.rst libcst/py.typed +include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md docs/source/*.rst libcst/py.typed include native/Cargo.toml recursive-include native * diff --git a/README.rst b/README.rst index 3b904cb2..4ad4cd67 100644 --- a/README.rst +++ b/README.rst @@ -148,49 +148,48 @@ Development You'll need a recent `Rust toolchain `_ for developing. -Then, start by setting up and activating a virtualenv: +We recommend using `hatch ` for running tests, linters, +etc. + +Then, start by setting up and building the project: .. code-block:: shell git clone git@github.com:Instagram/LibCST.git libcst cd libcst - python3 -m venv ../libcst-env/ # just an example, put this wherever you want - source ../libcst-env/bin/activate - pip install --upgrade pip # optional, if you have an old system version of pip - pip install -r requirements.txt -r requirements-dev.txt - # If you're done with the virtualenv, you can leave it by running: - deactivate + hatch env create -We use `ufmt `_ to format code. To format -changes to be conformant, run the following in the root: +To run the project's test suite, you can: .. code-block:: shell - ufmt format && python -m fixit.cli.apply_fix - -We use `slotscheck `_ to check the correctness -of class ``__slots__``. To check that slots are defined properly, run: - -.. code-block:: shell - - python -m slotscheck libcst - -To run all tests, you'll need to do the following in the root: - -.. code-block:: shell - - python -m unittest + hatch run test You can also run individual tests by using unittest and specifying a module like this: .. code-block:: shell - python -m unittest libcst.tests.test_batched_visitor + hatch run python -m unittest libcst.tests.test_batched_visitor See the `unittest documentation `_ for more examples of how to run tests. +We have multiple linters, including copyright checks and +`slotscheck `_ to check the correctness of class +``__slots__``. To run all of the linters: + +.. code-block:: shell + + hatch run lint + +We use `ufmt `_ to format code. To format +changes to be conformant, run the following in the root: + +.. code-block:: shell + + hatch run format + Building ~~~~~~~~ @@ -207,13 +206,11 @@ directory: cargo build -To build the ``libcst.native`` module and install ``libcst``, run this -from the root: +To rebuild the ``libcst.native`` module, from the repo root: .. code-block:: shell - pip uninstall -y libcst - pip install -e . + hatch env prune && hatch env create Type Checking ~~~~~~~~~~~~~ @@ -224,10 +221,7 @@ To verify types for the library, do the following in the root: .. code-block:: shell - pyre check - -*Note:* You may need to run the ``pip install -e .`` command prior -to type checking, see the section above on building. + hatch run typecheck Generating Documents ~~~~~~~~~~~~~~~~~~~~ @@ -236,7 +230,7 @@ To generate documents, do the following in the root: .. 
code-block:: shell - sphinx-build docs/source/ docs/build/ + hatch run docs Future ====== diff --git a/check_copyright.sh b/check_copyright.sh deleted file mode 100755 index 3f4eab75..00000000 --- a/check_copyright.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -set -eu -EXITCODE=0 -error() { echo "$1"; EXITCODE=1; } - -EXCEPTION_PATTERNS=( - "^native/libcst/tests/fixtures/" - "^libcst/_add_slots\.py$" - "^libcst/tests/test_\(e2e\|fuzz\)\.py$" - "^libcst/_parser/base_parser\.py$" - "^libcst/_parser/parso/utils\.py$" - "^libcst/_parser/parso/pgen2/\(generator\|grammar_parser\)\.py$" - "^libcst/_parser/parso/python/\(py_token\|tokenize\)\.py$" - "^libcst/_parser/parso/tests/test_\(fstring\|tokenize\|utils\)\.py$" -) - - -while read filename; do \ - if ! head -n 16 "$filename" | grep -q "Copyright (c) Meta Platforms, Inc. and affiliates."; then - error "Missing copyright in $filename" - fi -done < <( git ls-tree -r --name-only HEAD | grep "\(.py\|\.sh\|\.rs\)$" | \ - grep -v "${EXCEPTION_PATTERNS[@]/#/-e}" ) -exit $EXITCODE \ No newline at end of file diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index dd02e598..d0336d56 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -5,14 +5,18 @@ # +import platform import subprocess from pathlib import Path +from unittest import skipIf from libcst._parser.entrypoints import is_native from libcst.testing.utils import UnitTest class TestCodemodCLI(UnitTest): + # pyre-ignore - no idea why pyre is complaining about this + @skipIf(platform.system() == "Windows", "Windows") def test_codemod_formatter_error_input(self) -> None: rlt = subprocess.run( [ diff --git a/libcst/tests/__main__.py b/libcst/tests/__main__.py new file mode 100644 index 00000000..44e6bbe0 --- /dev/null +++ b/libcst/tests/__main__.py @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from unittest import main + +from libcst._parser.entrypoints import is_native + + +if __name__ == "__main__": + parser_type = "native" if is_native() else "pure" + print(f"running tests with {parser_type!r} parser") + + main(module=None, verbosity=2) diff --git a/pyproject.toml b/pyproject.toml index b3a63763..f24dd9d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,85 @@ +[build-system] +requires = ["setuptools", "wheel", "setuptools-rust"] + +[project] +name = "libcst" +description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." 
+readme = "README.rst" +dynamic = ["version"] +license = {file="LICENSE"} +classifiers = [ + "License :: OSI Approved :: MIT License", + "Topic :: Software Development :: Libraries", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +requires-python = ">=3.7" +dependencies = [ + "dataclasses>=0.6.0; python_version < '3.7'", + "typing_extensions>=3.7.4.2", + "typing_inspect>=0.4.0", + "pyyaml>=5.2", +] + +[project.optional-dependencies] +dev = [ + "black==22.12.0", + "coverage>=4.5.4", + "fixit==0.1.1", + "flake8>=3.7.8,<5", + "Sphinx>=5.1.1", + "hypothesis>=4.36.0", + "hypothesmith>=0.0.4", + "jupyter>=1.0.0", + "maturin>=0.8.3,<0.14", + "nbsphinx>=0.4.2", + "prompt-toolkit>=2.0.9", + "pyre-check==0.9.10; platform_system != 'Windows'", + "setuptools_scm>=6.0.1", + "sphinx-rtd-theme>=0.4.3", + "ufmt==2.0.1", + "usort==1.0.5", + "setuptools-rust>=1.5.2", + "slotscheck>=0.7.1", + "jinja2==3.1.2", +] + +[project.urls] +Documentation = "https://libcst.readthedocs.io/en/latest/" +Github = "https://github.com/Instagram/LibCST" +Changelog = "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md" + [tool.black] target-version = ["py37"] extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format -[tool.ufmt] -excludes = ["native/", "stubs/"] +[tool.hatch.envs.default] +features = ["dev"] + +[tool.hatch.envs.default.scripts] +docs = "sphinx-build -ab html docs/source docs/build" +fixtures = [ + "python scripts/regenerate-fixtures.py", + "git diff --exit-code", +] +format = "ufmt format libcst scripts" +lint = [ + "flake8 libcst", + "ufmt check libcst scripts", + "python -m slotscheck libcst", + "python scripts/check_copyright.py", +] +test = "python -m libcst.tests" +typecheck = [ + "pyre --version", + "pyre check", +] [tool.slotscheck] exclude-modules = '^libcst\.(testing|tests)' -[build-system] -requires = ["setuptools", "wheel", "setuptools-rust"] +[tool.ufmt] +excludes = ["native/", "stubs/"] diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 8155a99a..00000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,19 +0,0 @@ -black==22.12.0 -coverage>=4.5.4 -fixit==0.1.1 -flake8>=3.7.8,<5 -Sphinx>=5.1.1 -hypothesis>=4.36.0 -hypothesmith>=0.0.4 -jupyter>=1.0.0 -maturin>=0.8.3,<0.14 -nbsphinx>=0.4.2 -prompt-toolkit>=2.0.9 -pyre-check==0.9.10; platform_system != "Windows" -setuptools_scm>=6.0.1 -sphinx-rtd-theme>=0.4.3 -ufmt==2.0.1 -usort==1.0.5 -setuptools-rust>=1.5.2 -slotscheck>=0.7.1 -jinja2==3.1.2 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index cf8ff05c..00000000 --- a/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -dataclasses>=0.6.0; python_version < '3.7' -typing_extensions>=3.7.4.2 -typing_inspect>=0.4.0 -pyyaml>=5.2 diff --git a/scripts/check_copyright.py b/scripts/check_copyright.py new file mode 100644 index 00000000..47d90ec6 --- /dev/null +++ b/scripts/check_copyright.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import re +import sys +from pathlib import Path +from subprocess import run +from typing import Iterable, List, Pattern + +# Use the copyright header from this file as the benchmark for all files +EXPECTED_HEADER: str = "\n".join( + line for line in Path(__file__).read_text().splitlines()[:4] +) + +EXCEPTION_PATTERNS: List[Pattern[str]] = [ + re.compile(pattern) + for pattern in ( + r"^native/libcst/tests/fixtures/", + r"^libcst/_add_slots\.py$", + r"^libcst/tests/test_(e2e|fuzz)\.py$", + r"^libcst/_parser/base_parser\.py$", + r"^libcst/_parser/parso/utils\.py$", + r"^libcst/_parser/parso/pgen2/(generator|grammar_parser)\.py$", + r"^libcst/_parser/parso/python/(py_token|tokenize)\.py$", + r"^libcst/_parser/parso/tests/test_(fstring|tokenize|utils)\.py$", + ) +] + + +def tracked_files() -> Iterable[Path]: + proc = run( + ["git", "ls-tree", "-r", "--name-only", "HEAD"], + check=True, + capture_output=True, + encoding="utf-8", + ) + yield from ( + path + for line in proc.stdout.splitlines() + if not any(pattern.search(line) for pattern in EXCEPTION_PATTERNS) + if (path := Path(line)) and path.is_file() and path.suffix in (".py", ".sh") + ) + + +def main() -> None: + error = False + for path in tracked_files(): + content = path.read_text("utf-8") + if EXPECTED_HEADER not in content: + print(f"Missing or incomplete copyright in {path}") + error = True + sys.exit(1 if error else 0) + + +if __name__ == "__main__": + main() diff --git a/setup.py b/setup.py index b6aeb400..c81bbce0 100644 --- a/setup.py +++ b/setup.py @@ -3,24 +3,17 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. - -from os import environ, path +from os import environ import setuptools from setuptools_rust import Binding, RustExtension - -# Grab the readme so that our package stays in sync with github. 
-this_directory: str = path.abspath(path.dirname(__file__)) -with open(path.join(this_directory, "README.rst"), encoding="utf-8") as f: - long_description: str = f.read() - - def no_local_scheme(version: str) -> str: return "" setuptools.setup( + setup_requires=["setuptools-rust", "setuptools_scm"], use_scm_version={ "write_to": "libcst/_version.py", **( @@ -29,16 +22,6 @@ setuptools.setup( else {} ), }, - name="libcst", - description="A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs.", - long_description=long_description, - long_description_content_type="text/x-rst", - url="https://github.com/Instagram/LibCST", - project_urls={ - "Changelog": "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md", - "Documentation": "https://libcst.readthedocs.io/en/latest/", - }, - license="MIT", packages=setuptools.find_packages(), package_data={ "libcst": ["py.typed"], @@ -46,16 +29,6 @@ setuptools.setup( "libcst.codemod.tests": ["*"], }, test_suite="libcst", - python_requires=">=3.7", - setup_requires=["setuptools-rust", "setuptools_scm"], - install_requires=[dep.strip() for dep in open("requirements.txt").readlines()], - extras_require={ - "dev": [ - dep.strip() - for dep in open("requirements-dev.txt").readlines() - if "=" in dep - ], - }, rust_extensions=[ RustExtension( "libcst.native", @@ -63,14 +36,5 @@ setuptools.setup( binding=Binding.PyO3, ) ], - classifiers=[ - "License :: OSI Approved :: MIT License", - "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], zip_safe=False, # for mypy compatibility https://mypy.readthedocs.io/en/latest/installed_packages.html ) From 46509dd5e119ab3df7b53ef4eb281bde72a30409 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:53:50 +0000 Subject: [PATCH 353/632] Bump black from 22.12.0 to 23.1.0 (#860) --- libcst/_nodes/base.py | 2 -- libcst/_nodes/expression.py | 1 - libcst/_nodes/tests/test_list.py | 1 - libcst/_nodes/tests/test_set.py | 1 - libcst/codemod/commands/convert_format_to_fstring.py | 3 +-- libcst/codemod/commands/ensure_import_present.py | 1 - libcst/codemod/commands/rename.py | 1 - libcst/codemod/commands/strip_strings_from_types.py | 1 - libcst/codemod/commands/tests/test_add_pyre_directive.py | 1 - .../commands/tests/test_convert_format_to_fstring.py | 1 - .../commands/tests/test_convert_namedtuple_to_dataclass.py | 1 - libcst/codemod/commands/tests/test_convert_type_comments.py | 1 - libcst/codemod/commands/tests/test_fix_pyre_directives.py | 1 - libcst/codemod/commands/tests/test_noop.py | 1 - libcst/codemod/commands/tests/test_remove_pyre_directive.py | 2 -- libcst/codemod/commands/tests/test_rename.py | 5 ----- .../codemod/commands/tests/test_strip_strings_from_types.py | 1 - libcst/codemod/commands/unnecessary_format_string.py | 1 - libcst/codemod/tests/test_codemod.py | 2 -- libcst/codemod/tests/test_metadata.py | 2 -- libcst/codemod/visitors/_add_imports.py | 2 +- libcst/codemod/visitors/_apply_type_annotations.py | 1 - libcst/codemod/visitors/_gather_unused_imports.py | 2 +- libcst/codemod/visitors/tests/test_add_imports.py | 1 - libcst/codemod/visitors/tests/test_remove_imports.py | 5 ----- libcst/matchers/_matcher_base.py | 6 +++--- libcst/metadata/tests/test_name_provider.py | 3 +-- 
libcst/metadata/type_inference_provider.py | 1 - libcst/testing/utils.py | 1 - libcst/tool.py | 1 - pyproject.toml | 2 +- 31 files changed, 8 insertions(+), 47 deletions(-) diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 03597641..4fe5edbf 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -109,7 +109,6 @@ def _clone(val: object) -> object: @dataclass(frozen=True) class CSTNode(ABC): - __slots__: ClassVar[Sequence[str]] = () def __post_init__(self) -> None: @@ -471,7 +470,6 @@ class CSTNode(ABC): class BaseLeaf(CSTNode, ABC): - __slots__ = () @property diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 2df1e3bc..f05a6a2f 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -535,7 +535,6 @@ StringQuoteLiteral = Literal['"', "'", '"""', "'''"] class _BasePrefixedString(BaseString, ABC): - __slots__ = () @property diff --git a/libcst/_nodes/tests/test_list.py b/libcst/_nodes/tests/test_list.py index 9096ced6..43e22df7 100644 --- a/libcst/_nodes/tests/test_list.py +++ b/libcst/_nodes/tests/test_list.py @@ -14,7 +14,6 @@ from libcst.testing.utils import data_provider class ListTest(CSTNodeTest): - # A lot of Element/StarredElement tests are provided by the tests for Tuple, so we # we don't need to duplicate them here. @data_provider( diff --git a/libcst/_nodes/tests/test_set.py b/libcst/_nodes/tests/test_set.py index bdf84955..335a4d3a 100644 --- a/libcst/_nodes/tests/test_set.py +++ b/libcst/_nodes/tests/test_set.py @@ -13,7 +13,6 @@ from libcst.testing.utils import data_provider class ListTest(CSTNodeTest): - # A lot of Element/StarredElement tests are provided by the tests for Tuple, so we # we don't need to duplicate them here. @data_provider( diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index a74b5342..ab98c0ea 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -219,7 +219,6 @@ class SwitchStringQuotesTransformer(ContextAwareTransformer): class ConvertFormatStringCommand(VisitorBasedCodemodCommand): - DESCRIPTION: str = "Converts instances of str.format() to f-string." @staticmethod @@ -271,7 +270,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): inserted_sequence: int = 0 stringnode = cst.ensure_type(extraction["string"], cst.SimpleString) tokens = _get_tokens(stringnode.raw_value) - for (literal_text, field_name, format_spec, conversion) in tokens: + for literal_text, field_name, format_spec, conversion in tokens: if literal_text: fstring.append(cst.FormattedStringText(literal_text)) if field_name is None: diff --git a/libcst/codemod/commands/ensure_import_present.py b/libcst/codemod/commands/ensure_import_present.py index a97fddfd..44dda822 100644 --- a/libcst/codemod/commands/ensure_import_present.py +++ b/libcst/codemod/commands/ensure_import_present.py @@ -11,7 +11,6 @@ from libcst.codemod.visitors import AddImportsVisitor class EnsureImportPresentCommand(MagicArgsCodemodCommand): - DESCRIPTION: str = ( "Given a module and possibly an entity in that module, add an import " + "as long as one does not already exist." 
diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 62809cc9..472d1e61 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -189,7 +189,6 @@ class RenameCommand(VisitorBasedCodemodCommand): if alias_name is not None: qual_name = f"{imported_module_name}.{alias_name}" if self.old_name == qual_name: - replacement_module = self.gen_replacement_module( imported_module_name ) diff --git a/libcst/codemod/commands/strip_strings_from_types.py b/libcst/codemod/commands/strip_strings_from_types.py index 6564d041..871d2513 100644 --- a/libcst/codemod/commands/strip_strings_from_types.py +++ b/libcst/codemod/commands/strip_strings_from_types.py @@ -14,7 +14,6 @@ from libcst.metadata import QualifiedNameProvider class StripStringsCommand(VisitorBasedCodemodCommand): - DESCRIPTION: str = ( "Converts string type annotations to 3.7-compatible forward references." ) diff --git a/libcst/codemod/commands/tests/test_add_pyre_directive.py b/libcst/codemod/commands/tests/test_add_pyre_directive.py index a8e7e45a..37e6f2f9 100644 --- a/libcst/codemod/commands/tests/test_add_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_add_pyre_directive.py @@ -8,7 +8,6 @@ from libcst.codemod.commands.add_pyre_directive import AddPyreUnsafeCommand class TestAddPyreUnsafeCommand(CodemodTest): - TRANSFORM = AddPyreUnsafeCommand def test_add_to_file(self) -> None: diff --git a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py index b2209cdc..1a10303b 100644 --- a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py @@ -8,7 +8,6 @@ from libcst.codemod.commands.convert_format_to_fstring import ConvertFormatStrin class ConvertFormatStringCommandTest(CodemodTest): - TRANSFORM = ConvertFormatStringCommand def test_noop(self) -> None: diff --git a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py index 866f03c4..8e0b314d 100644 --- a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py @@ -10,7 +10,6 @@ from libcst.codemod.commands.convert_namedtuple_to_dataclass import ( class ConvertNamedTupleToDataclassCommandTest(CodemodTest): - TRANSFORM = ConvertNamedTupleToDataclassCommand def test_no_change(self) -> None: diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 0e350238..2c5917d1 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -11,7 +11,6 @@ from libcst.codemod.commands.convert_type_comments import ConvertTypeComments class TestConvertTypeCommentsBase(CodemodTest): - maxDiff = 1500 TRANSFORM = ConvertTypeComments diff --git a/libcst/codemod/commands/tests/test_fix_pyre_directives.py b/libcst/codemod/commands/tests/test_fix_pyre_directives.py index 8871397a..2d07af12 100644 --- a/libcst/codemod/commands/tests/test_fix_pyre_directives.py +++ b/libcst/codemod/commands/tests/test_fix_pyre_directives.py @@ -8,7 +8,6 @@ from libcst.codemod.commands.fix_pyre_directives import FixPyreDirectivesCommand class TestFixPyreDirectivesCommand(CodemodTest): - TRANSFORM = FixPyreDirectivesCommand def test_no_need_to_fix_simple(self) -> None: diff 
--git a/libcst/codemod/commands/tests/test_noop.py b/libcst/codemod/commands/tests/test_noop.py index 12995f5c..fa586a3d 100644 --- a/libcst/codemod/commands/tests/test_noop.py +++ b/libcst/codemod/commands/tests/test_noop.py @@ -8,7 +8,6 @@ from libcst.codemod.commands.noop import NOOPCommand class TestNOOPCodemod(CodemodTest): - TRANSFORM = NOOPCommand def test_noop(self) -> None: diff --git a/libcst/codemod/commands/tests/test_remove_pyre_directive.py b/libcst/codemod/commands/tests/test_remove_pyre_directive.py index 74c1847c..c99f88ce 100644 --- a/libcst/codemod/commands/tests/test_remove_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_remove_pyre_directive.py @@ -11,7 +11,6 @@ from libcst.codemod.commands.remove_pyre_directive import ( class TestRemovePyreStrictCommand(CodemodTest): - TRANSFORM = RemovePyreStrictCommand def test_remove_from_file(self) -> None: @@ -97,7 +96,6 @@ class TestRemovePyreStrictCommand(CodemodTest): class TestRemovePyreUnsafeCommand(CodemodTest): - TRANSFORM = RemovePyreUnsafeCommand def test_remove_from_file(self) -> None: diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index f97062b7..2c9daed1 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -10,11 +10,9 @@ from libcst.codemod.commands.rename import RenameCommand class TestRenameCommand(CodemodTest): - TRANSFORM = RenameCommand def test_rename_name(self) -> None: - before = """ from foo import bar @@ -31,7 +29,6 @@ class TestRenameCommand(CodemodTest): self.assertCodemod(before, after, old_name="foo.bar", new_name="baz.qux") def test_rename_name_asname(self) -> None: - before = """ from foo import bar as bla @@ -73,7 +70,6 @@ class TestRenameCommand(CodemodTest): ) def test_rename_attr(self) -> None: - before = """ import a.b @@ -95,7 +91,6 @@ class TestRenameCommand(CodemodTest): ) def test_rename_attr_asname(self) -> None: - before = """ import foo as bar diff --git a/libcst/codemod/commands/tests/test_strip_strings_from_types.py b/libcst/codemod/commands/tests/test_strip_strings_from_types.py index 64a2fd10..3c3893b1 100644 --- a/libcst/codemod/commands/tests/test_strip_strings_from_types.py +++ b/libcst/codemod/commands/tests/test_strip_strings_from_types.py @@ -8,7 +8,6 @@ from libcst.codemod.commands.strip_strings_from_types import StripStringsCommand class TestStripStringsCodemod(CodemodTest): - TRANSFORM = StripStringsCommand def test_noop(self) -> None: diff --git a/libcst/codemod/commands/unnecessary_format_string.py b/libcst/codemod/commands/unnecessary_format_string.py index d3bcd1c9..2320af17 100644 --- a/libcst/codemod/commands/unnecessary_format_string.py +++ b/libcst/codemod/commands/unnecessary_format_string.py @@ -9,7 +9,6 @@ from libcst.codemod import VisitorBasedCodemodCommand class UnnecessaryFormatString(VisitorBasedCodemodCommand): - DESCRIPTION: str = ( "Converts f-strings which perform no formatting to regular strings." 
) diff --git a/libcst/codemod/tests/test_codemod.py b/libcst/codemod/tests/test_codemod.py index 754f35fe..ba68b94b 100644 --- a/libcst/codemod/tests/test_codemod.py +++ b/libcst/codemod/tests/test_codemod.py @@ -23,7 +23,6 @@ class SimpleCodemod(Codemod): class TestSkipDetection(CodemodTest): - TRANSFORM = SimpleCodemod def test_detect_skip(self) -> None: @@ -87,7 +86,6 @@ class IncrementCodemod(Codemod): class TestMultipass(CodemodTest): - TRANSFORM = IncrementCodemod def test_multi_iterations(self) -> None: diff --git a/libcst/codemod/tests/test_metadata.py b/libcst/codemod/tests/test_metadata.py index 1a99b3dd..eff3a228 100644 --- a/libcst/codemod/tests/test_metadata.py +++ b/libcst/codemod/tests/test_metadata.py @@ -13,7 +13,6 @@ from libcst.testing.utils import UnitTest class TestingCollector(ContextAwareVisitor): - METADATA_DEPENDENCIES = (PositionProvider,) def visit_Pass(self, node: cst.Pass) -> None: @@ -22,7 +21,6 @@ class TestingCollector(ContextAwareVisitor): class TestingTransform(ContextAwareTransformer): - METADATA_DEPENDENCIES = (PositionProvider,) def visit_FunctionDef(self, node: cst.FunctionDef) -> None: diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 9c4f04c5..8081adf9 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -183,7 +183,7 @@ class AddImportsVisitor(ContextAwareTransformer): if module in self.module_aliases and self.module_aliases[module] == alias: del self.module_aliases[module] for module, aliases in gatherer.alias_mapping.items(): - for (obj, alias) in aliases: + for obj, alias in aliases: if ( module in self.alias_mapping and (obj, alias) in self.alias_mapping[module] diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 7811aa63..c9090503 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -1301,7 +1301,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): original_node: cst.Assign, updated_node: cst.Assign, ) -> Union[cst.Assign, cst.AnnAssign]: - self.current_assign = None if len(original_node.targets) > 1: diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index d6211509..01243cae 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -108,7 +108,7 @@ class GatherUnusedImportsVisitor(ContextAwareVisitor): Override this in a subclass for additional filtering. 
""" unused_imports = set() - for (alias, parent) in candidates: + for alias, parent in candidates: scope = self.get_metadata(ScopeProvider, parent) if scope is None: continue diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 68437522..0682fa51 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -8,7 +8,6 @@ from libcst.codemod.visitors import AddImportsVisitor, ImportItem class TestAddImportsCodemod(CodemodTest): - TRANSFORM = AddImportsVisitor def test_noop(self) -> None: diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index 11db2e9f..6e51c515 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -17,7 +17,6 @@ from libcst.testing.utils import data_provider class TestRemoveImportsCodemod(CodemodTest): - TRANSFORM = RemoveImportsVisitor def test_noop(self) -> None: @@ -757,7 +756,6 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveBarTransformer(VisitorBasedCodemodCommand): - METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) @m.leave( @@ -821,7 +819,6 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveImportTransformer(VisitorBasedCodemodCommand): - METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) def visit_ImportFrom(self, node: cst.ImportFrom) -> None: @@ -860,7 +857,6 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveImportTransformer(VisitorBasedCodemodCommand): - METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) def visit_Import(self, node: cst.Import) -> None: @@ -892,7 +888,6 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveImportTransformer(VisitorBasedCodemodCommand): - METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) def visit_ImportFrom(self, node: cst.ImportFrom) -> None: diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 3e984860..666ceab0 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -1524,9 +1524,9 @@ def _matches( return _node_matches(node, matcher, metadata_lookup) -def _construct_metadata_fetcher_null() -> Callable[ - [meta.ProviderT, libcst.CSTNode], object -]: +def _construct_metadata_fetcher_null() -> ( + Callable[[meta.ProviderT, libcst.CSTNode], object] +): def _fetch(provider: meta.ProviderT, node: libcst.CSTNode) -> NoReturn: raise LookupError( f"{provider.__name__} is not resolved; did you forget a MetadataWrapper?" 
diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index c1ba59bc..80215dc6 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -24,7 +24,6 @@ from libcst.testing.utils import data_provider, UnitTest class QNameVisitor(cst.CSTVisitor): - METADATA_DEPENDENCIES = (QualifiedNameProvider,) def __init__(self) -> None: @@ -543,7 +542,7 @@ class FullyQualifiedNameProviderTest(UnitTest): def test_local_qualification(self) -> None: module_name = "some.test.module" package_name = "some.test" - for (name, expected) in [ + for name, expected in [ (".foo", "some.test.foo"), ("..bar", "some.bar"), ("foo", "some.test.module.foo"), diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 340d1c51..6f555fdf 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -63,7 +63,6 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): try: stdout, stderr, return_code = run_command(cmd_args, timeout=timeout) except subprocess.TimeoutExpired as exc: - raise exc if return_code != 0: diff --git a/libcst/testing/utils.py b/libcst/testing/utils.py index f2557b66..8a320571 100644 --- a/libcst/testing/utils.py +++ b/libcst/testing/utils.py @@ -64,7 +64,6 @@ def populate_data_provider_tests(dct: Dict[str, Any]) -> None: member_name, member, DATA_PROVIDER_DATA_ATTR_NAME ) if provider_data is not None: - for description, data in ( provider_data.items() if isinstance(provider_data, dict) diff --git a/libcst/tool.py b/libcst/tool.py index 4101d70c..a0fa5bbc 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -541,7 +541,6 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 # Sepcify target version for black formatter if os.path.basename(config["formatter"][0]) in ("black", "black.exe"): - parsed_version = parse_version_string(args.python_version) config["formatter"] = [ diff --git a/pyproject.toml b/pyproject.toml index f24dd9d7..cb9cc8e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ [project.optional-dependencies] dev = [ - "black==22.12.0", + "black==23.1.0", "coverage>=4.5.4", "fixit==0.1.1", "flake8>=3.7.8,<5", From 9381fee9ab1696ba8eec19d860957d8453f2d15c Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Fri, 24 Mar 2023 12:03:11 +0200 Subject: [PATCH 354/632] Use subprocess.DEVNULL instead of opening os.devnull by hand (#897) --- libcst/codegen/generate.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index 74a418f5..33d89b87 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -25,8 +25,11 @@ from libcst.codegen.transforms import ( def format_file(fname: str) -> None: - with open(os.devnull, "w") as devnull: - subprocess.check_call(["ufmt", "format", fname], stdout=devnull, stderr=devnull) + subprocess.check_call( + ["ufmt", "format", fname], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) def clean_generated_code(code: str) -> str: @@ -65,12 +68,11 @@ def codegen_visitors() -> None: # Now, see if the file we generated causes any import errors # by attempting to run codegen again in a new process. 
- with open(os.devnull, "w") as devnull: - subprocess.check_call( - ["python3", "-m", "libcst.codegen.gen_visitor_functions"], - cwd=base, - stdout=devnull, - ) + subprocess.check_call( + ["python3", "-m", "libcst.codegen.gen_visitor_functions"], + cwd=base, + stdout=subprocess.DEVNULL, + ) # If it worked, lets format the file format_file(visitors_file) From c016df46cd0dcfeb208eea5e7357566092cc4fc9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 10:55:42 +0100 Subject: [PATCH 355/632] Bump bumpalo from 3.10.0 to 3.12.0 in /native (#856) Bumps [bumpalo](https://github.com/fitzgen/bumpalo) from 3.10.0 to 3.12.0. - [Release notes](https://github.com/fitzgen/bumpalo/releases) - [Changelog](https://github.com/fitzgen/bumpalo/blob/main/CHANGELOG.md) - [Commits](https://github.com/fitzgen/bumpalo/compare/3.10.0...3.12.0) --- updated-dependencies: - dependency-name: bumpalo dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 10c8596f..92ba4b04 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -60,9 +60,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.10.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "cast" From ae42deed9bfb519749e07615e229051aeaf04a3b Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Mon, 27 Mar 2023 12:56:25 +0300 Subject: [PATCH 356/632] Ensure current Python interpreter is used for subprocesses (#898) --- libcst/codegen/generate.py | 2 +- libcst/codemod/tests/test_codemod_cli.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index 33d89b87..92f13176 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -69,7 +69,7 @@ def codegen_visitors() -> None: # Now, see if the file we generated causes any import errors # by attempting to run codegen again in a new process. 
subprocess.check_call( - ["python3", "-m", "libcst.codegen.gen_visitor_functions"], + [sys.executable, "-m", "libcst.codegen.gen_visitor_functions"], cwd=base, stdout=subprocess.DEVNULL, ) diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index d0336d56..b8d3d79f 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -7,6 +7,7 @@ import platform import subprocess +import sys from pathlib import Path from unittest import skipIf @@ -20,7 +21,7 @@ class TestCodemodCLI(UnitTest): def test_codemod_formatter_error_input(self) -> None: rlt = subprocess.run( [ - "python", + sys.executable, "-m", "libcst.tool", "codemod", From 4f810dbc1343da04db8eb3fee5300490fd7a334b Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Mon, 27 Mar 2023 12:59:48 +0300 Subject: [PATCH 357/632] Allow running codemods without configuring in YAML (#879) * Simplify command specifier parsing * Allow running codemods without configuring in YAML This enables codemodding things by just plonking a CodemodCommand class into any old importable module and running `python -m libcst.tool codemod -x some_module.SomeClass ...` --- libcst/codemod/tests/test_codemod_cli.py | 18 ++++++ libcst/tool.py | 81 ++++++++++++++---------- 2 files changed, 65 insertions(+), 34 deletions(-) diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index b8d3d79f..0309c74a 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -45,3 +45,21 @@ class TestCodemodCLI(UnitTest): "error: cannot format -: Cannot parse: 13:10: async with AsyncExitStack() as stack:", rlt.stderr.decode("utf-8"), ) + + def test_codemod_external(self) -> None: + # Test running the NOOP command as an "external command" + # against this very file. + output = subprocess.check_output( + [ + sys.executable, + "-m", + "libcst.tool", + "codemod", + "-x", # external module + "libcst.codemod.commands.noop.NOOPCommand", + str(Path(__file__)), + ], + encoding="utf-8", + stderr=subprocess.STDOUT, + ) + assert "Finished codemodding 1 files!" in output diff --git a/libcst/tool.py b/libcst/tool.py index a0fa5bbc..5aa4d12f 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -391,38 +391,49 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 # full parser below once we know the command and have added its arguments. parser = argparse.ArgumentParser(add_help=False, fromfile_prefix_chars="@") parser.add_argument("command", metavar="COMMAND", type=str, nargs="?", default=None) + ext_action = parser.add_argument( + "-x", + "--external", + action="store_true", + default=False, + help="Interpret `command` as just a module/class specifier", + ) args, _ = parser.parse_known_args(command_args) # Now, try to load the class and get its arguments for help purposes. 
if args.command is not None: - command_path = args.command.split(".") - if len(command_path) < 2: + command_module_name, _, command_class_name = args.command.rpartition(".") + if not (command_module_name and command_class_name): print(f"{args.command} is not a valid codemod command", file=sys.stderr) return 1 - command_module_name, command_class_name = ( - ".".join(command_path[:-1]), - command_path[-1], - ) - command_class = None - for module in config["modules"]: - try: - command_class = getattr( - importlib.import_module(f"{module}.{command_module_name}"), - command_class_name, - ) - break - # Only swallow known import errors, show the rest of the exceptions - # to the user who is trying to run the codemod. - except AttributeError: - continue - except ModuleNotFoundError: - continue - if command_class is None: - print( - f"Could not find {command_module_name} in any configured modules", - file=sys.stderr, + if args.external: + # There's no error handling here on purpose; if the user opted in for `-x`, + # they'll probably want to see the exact import error too. + command_class = getattr( + importlib.import_module(command_module_name), + command_class_name, ) - return 1 + else: + command_class = None + for module in config["modules"]: + try: + command_class = getattr( + importlib.import_module(f"{module}.{command_module_name}"), + command_class_name, + ) + break + # Only swallow known import errors, show the rest of the exceptions + # to the user who is trying to run the codemod. + except AttributeError: + continue + except ModuleNotFoundError: + continue + if command_class is None: + print( + f"Could not find {command_module_name} in any configured modules", + file=sys.stderr, + ) + return 1 else: # Dummy, specifically to allow for running --help with no arguments. command_class = CodemodCommand @@ -437,6 +448,7 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 prog=f"{proc_name} codemod", fromfile_prefix_chars="@", ) + parser._add_action(ext_action) parser.add_argument( "command", metavar="COMMAND", @@ -522,20 +534,21 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 k: v for k, v in vars(args).items() if k - not in [ + not in { "command", - "path", - "unified_diff", - "jobs", - "python_version", + "external", + "hide_blacklisted_warnings", + "hide_generated_warnings", + "hide_progress", "include_generated", "include_stubs", + "jobs", "no_format", + "path", + "python_version", "show_successes", - "hide_generated_warnings", - "hide_blacklisted_warnings", - "hide_progress", - ] + "unified_diff", + } } command_instance = command_class(CodemodContext(), **codemod_args) From f936db240f53e210f6dd70215523e24bbdb53bf2 Mon Sep 17 00:00:00 2001 From: Rebecca Chen Date: Wed, 5 Apr 2023 14:23:53 -0700 Subject: [PATCH 358/632] Fix ApplyTypeAnnotationsVisitor behavior on attribute assignments. (#903) * Fixes an issue where ApplyTypeAnnotationsVisitor would crash on code like `SomeClass.some_attribute = 42` with a "Name is not a valid identifier" error message. * Changes the above-mentioned error message to include the bad name in the message, for easier debugging. * Adds tests for all valid assignment targets, as described here: https://libcst.readthedocs.io/en/latest/nodes.html#libcst.BaseAssignTargetExpression. 
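As an illustration, a minimal sketch of the scenario described above, mirroring the new `attribute` test case added below; it assumes the usual `store_stub_in_context` / `transform_module` entry points rather than anything introduced by this patch:

```python
import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.visitors import ApplyTypeAnnotationsVisitor

stub = cst.parse_module("class C:\n    x: int\n")
before = cst.parse_module("class C:\n    x = 0\nC.x = 1\n")

context = CodemodContext()
ApplyTypeAnnotationsVisitor.store_stub_in_context(context, stub)
# The attribute assignment `C.x = 1` used to trigger the
# "Name is not a valid identifier" crash; with this fix it is left alone,
# while the class-level `x = 0` picks up the `int` annotation from the stub.
after = ApplyTypeAnnotationsVisitor(context).transform_module(before)
print(after.code)
```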
--- libcst/_nodes/expression.py | 2 +- .../visitors/_apply_type_annotations.py | 2 +- .../tests/test_apply_type_annotations.py | 88 +++++++++++++++++++ 3 files changed, 90 insertions(+), 2 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index f05a6a2f..54202347 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -354,7 +354,7 @@ class Name(BaseAssignTargetExpression, BaseDelTargetExpression): if len(self.value) == 0: raise CSTValidationError("Cannot have empty name identifier.") if not self.value.isidentifier(): - raise CSTValidationError("Name is not a valid identifier.") + raise CSTValidationError(f"Name {self.value!r} is not a valid identifier.") def _codegen_impl(self, state: CodegenState) -> None: with self._parenthesize(state): diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index c9090503..a727e897 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -1008,7 +1008,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.qualifier.append(name) if ( self._qualifier_name() in self.annotations.attributes - and not isinstance(only_target, cst.Subscript) + and not isinstance(only_target, (cst.Attribute, cst.Subscript)) ): annotation = self.annotations.attributes[self._qualifier_name()] self.qualifier.pop() diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index aa05ab73..d237709a 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1745,3 +1745,91 @@ class TestApplyAnnotationsVisitor(CodemodTest): self.run_test_case_with_flags( stub=stub, before=before, after=after, always_qualify_annotations=True ) + + @data_provider( + { + "attribute": ( + """ + class C: + x: int + """, + """ + class C: + x = 0 + C.x = 1 + """, + """ + class C: + x: int = 0 + C.x = 1 + """, + ), + "subscript": ( + """ + d: dict[str, int] + """, + """ + d = {} + d["k"] = 0 + """, + """ + d: dict[str, int] = {} + d["k"] = 0 + """, + ), + "starred": ( + """ + a: int + b: list[int] + """, + """ + a, *b = [1, 2, 3] + """, + """ + a: int + b: list[int] + + a, *b = [1, 2, 3] + """, + ), + "name": ( + """ + a: int + """, + """ + a = 0 + """, + """ + a: int = 0 + """, + ), + "list": ( + """ + a: int + """, + """ + [a] = [0] + """, + """ + a: int + + [a] = [0] + """, + ), + "tuple": ( + """ + a: int + """, + """ + (a,) = [0] + """, + """ + a: int + + (a,) = [0] + """, + ), + } + ) + def test_valid_assign_expressions(self, stub: str, before: str, after: str) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) From f0a4d62c3b174084d253c0fa413ee8d71bc718d3 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Thu, 13 Apr 2023 14:42:57 -0700 Subject: [PATCH 359/632] Fix spelling and grammar in some comments (#908) I'm going to go ahead and land this, I don't think it needs review --- libcst/codemod/commands/convert_type_comments.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 808edec6..8335160e 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -706,7 +706,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): # able to 
extract type information. This is done via mutable state and the # usual visitor pattern. # (B) we also manually reach down to the first statement inside of the - # funciton body and aggressively strip type comments from leading + # function body and aggressively strip type comments from leading # whitespaces # # PEP 484 underspecifies how to apply type comments to (non-static) @@ -786,7 +786,7 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): self, node: cst.FunctionDef, ) -> None: - "Turn off aggressive type comment removal when we've leaved the header." + "Turn off aggressive type comment removal when we've left the header." self.aggressively_strip_type_comments = False def leave_IndentedBlock( From 2055342fd6ebbb7476f9d1f822bbb78d6f131cfe Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 21 Apr 2023 06:38:19 -0600 Subject: [PATCH 360/632] Support PEP 604 in ApplyTypeAnnotationsVisitor (#868) --- .../visitors/_apply_type_annotations.py | 245 +++++++----------- .../tests/test_apply_type_annotations.py | 79 +++++- 2 files changed, 170 insertions(+), 154 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index a727e897..2f2e3ac9 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -255,35 +255,38 @@ class ImportedSymbolCollector(m.MatcherDecoratableVisitor): super().__init__() self.existing_imports: Set[str] = existing_imports self.imported_symbols: Dict[str, Set[ImportedSymbol]] = defaultdict(set) + self.in_annotation: bool = False + + def visit_Annotation(self, node: cst.Annotation) -> None: + self.in_annotation = True + + def leave_Annotation(self, original_node: cst.Annotation) -> None: + self.in_annotation = False def visit_ClassDef(self, node: cst.ClassDef) -> None: for base in node.bases: value = base.value if isinstance(value, NAME_OR_ATTRIBUTE): self._handle_NameOrAttribute(value) - elif isinstance(value, cst.Subscript): - self._handle_Subscript(value) - def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: - if node.returns is not None: - self._handle_Annotation(annotation=node.returns) - self._handle_Parameters(node.params) + def visit_Name(self, node: cst.Name) -> None: + if self.in_annotation: + self._handle_NameOrAttribute(node) - # pyi files don't support inner functions, return False to stop the traversal. - return False + def visit_Attribute(self, node: cst.Attribute) -> None: + if self.in_annotation: + self._handle_NameOrAttribute(node) - def visit_AnnAssign(self, node: cst.AnnAssign) -> None: - self._handle_Annotation(annotation=node.annotation) - - # Handler functions. 
- # - # These ultimately all call _handle_NameOrAttribute, which adds the - # qualified name to the list of imported symbols + def visit_Subscript(self, node: cst.Subscript) -> bool: + if isinstance(node.value, NAME_OR_ATTRIBUTE): + return True + return _get_unique_qualified_name(self, node) not in ("Type", "typing.Type") def _handle_NameOrAttribute( self, node: NameOrAttribute, ) -> None: + # Adds the qualified name to the list of imported symbols obj = sym = None # keep pyre happy if isinstance(node, cst.Name): obj = None @@ -304,48 +307,6 @@ class ImportedSymbolCollector(m.MatcherDecoratableVisitor): ) self.imported_symbols[sym].add(mod) - def _handle_Index(self, slice: cst.Index) -> None: - value = slice.value - if isinstance(value, cst.Subscript): - self._handle_Subscript(value) - elif isinstance(value, cst.Attribute): - self._handle_NameOrAttribute(value) - - def _handle_Subscript(self, node: cst.Subscript) -> None: - value = node.value - if isinstance(value, NAME_OR_ATTRIBUTE): - self._handle_NameOrAttribute(value) - else: - raise ValueError("Expected any indexed type to have") - if _get_unique_qualified_name(self, node) in ("Type", "typing.Type"): - return - slice = node.slice - if isinstance(slice, tuple): - for item in slice: - if isinstance(item.slice.value, NAME_OR_ATTRIBUTE): - self._handle_NameOrAttribute(item.slice.value) - else: - if isinstance(item.slice, cst.Index): - self._handle_Index(item.slice) - elif isinstance(slice, cst.Index): - self._handle_Index(slice) - - def _handle_Annotation(self, annotation: cst.Annotation) -> None: - node = annotation.annotation - if isinstance(node, cst.Subscript): - self._handle_Subscript(node) - elif isinstance(node, NAME_OR_ATTRIBUTE): - self._handle_NameOrAttribute(node) - elif isinstance(node, cst.SimpleString): - pass - else: - raise ValueError(f"Unexpected annotation node: {node}") - - def _handle_Parameters(self, parameters: cst.Parameters) -> None: - for parameter in list(parameters.params): - if parameter.annotation is not None: - self._handle_Annotation(annotation=parameter.annotation) - class TypeCollector(m.MatcherDecoratableVisitor): """ @@ -392,9 +353,9 @@ class TypeCollector(m.MatcherDecoratableVisitor): for base in node.bases: value = base.value if isinstance(value, NAME_OR_ATTRIBUTE): - new_value = self._handle_NameOrAttribute(value) + new_value = value.visit(_TypeCollectorDequalifier(self)) elif isinstance(value, cst.Subscript): - new_value = self._handle_Subscript(value) + new_value = value.visit(_TypeCollectorDequalifier(self)) else: start = self.get_metadata(PositionProvider, node).start raise ValueError( @@ -421,7 +382,12 @@ class TypeCollector(m.MatcherDecoratableVisitor): self.qualifier.append(node.name.value) returns = node.returns return_annotation = ( - self._handle_Annotation(annotation=returns) if returns is not None else None + returns.visit(_TypeCollectorDequalifier(self)) + if returns is not None + else None + ) + assert return_annotation is None or isinstance( + return_annotation, cst.Annotation ) parameter_annotations = self._handle_Parameters(node.params) name = ".".join(self.qualifier) @@ -446,7 +412,8 @@ class TypeCollector(m.MatcherDecoratableVisitor): name = get_full_name_for_node(node.target) if name is not None: self.qualifier.append(name) - annotation_value = self._handle_Annotation(annotation=node.annotation) + annotation_value = node.annotation.visit(_TypeCollectorDequalifier(self)) + assert isinstance(annotation_value, cst.Annotation) self.annotations.attributes[".".join(self.qualifier)] = 
annotation_value return True @@ -541,97 +508,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): return False return False - # Handler functions. - # - # Each of these does one of two things, possibly recursively, over some - # valid CST node for a static type: - # - process the qualified name and ensure we will add necessary imports - # - dequalify the node - - def _handle_NameOrAttribute( - self, - node: NameOrAttribute, - ) -> Union[cst.Name, cst.Attribute]: - qualified_name = _get_unique_qualified_name(self, node) - should_qualify = self._handle_qualification_and_should_qualify( - qualified_name, node - ) - self.annotations.names.add(qualified_name) - if should_qualify: - qualified_node = ( - cst.parse_module(qualified_name) if isinstance(node, cst.Name) else node - ) - return qualified_node # pyre-ignore[7] - else: - dequalified_node = node.attr if isinstance(node, cst.Attribute) else node - return dequalified_node - - def _handle_Index( - self, - slice: cst.Index, - ) -> cst.Index: - value = slice.value - if isinstance(value, cst.Subscript): - return slice.with_changes(value=self._handle_Subscript(value)) - elif isinstance(value, cst.Attribute): - return slice.with_changes(value=self._handle_NameOrAttribute(value)) - else: - if isinstance(value, cst.SimpleString): - self.annotations.names.add(_get_string_value(value)) - return slice - - def _handle_Subscript( - self, - node: cst.Subscript, - ) -> cst.Subscript: - value = node.value - if isinstance(value, NAME_OR_ATTRIBUTE): - new_node = node.with_changes(value=self._handle_NameOrAttribute(value)) - else: - raise ValueError("Expected any indexed type to have") - if _get_unique_qualified_name(self, node) in ("Type", "typing.Type"): - # Note: we are intentionally not handling qualification of - # anything inside `Type` because it's common to have nested - # classes, which we cannot currently distinguish from classes - # coming from other modules, appear here. 
- return new_node - slice = node.slice - if isinstance(slice, tuple): - new_slice = [] - for item in slice: - value = item.slice.value - if isinstance(value, NAME_OR_ATTRIBUTE): - name = self._handle_NameOrAttribute(item.slice.value) - new_index = item.slice.with_changes(value=name) - new_slice.append(item.with_changes(slice=new_index)) - else: - if isinstance(item.slice, cst.Index): - new_index = item.slice.with_changes( - value=self._handle_Index(item.slice) - ) - item = item.with_changes(slice=new_index) - new_slice.append(item) - return new_node.with_changes(slice=tuple(new_slice)) - elif isinstance(slice, cst.Index): - new_slice = self._handle_Index(slice) - return new_node.with_changes(slice=new_slice) - else: - return new_node - - def _handle_Annotation( - self, - annotation: cst.Annotation, - ) -> cst.Annotation: - node = annotation.annotation - if isinstance(node, cst.SimpleString): - self.annotations.names.add(_get_string_value(node)) - return annotation - elif isinstance(node, cst.Subscript): - return cst.Annotation(annotation=self._handle_Subscript(node)) - elif isinstance(node, NAME_OR_ATTRIBUTE): - return cst.Annotation(annotation=self._handle_NameOrAttribute(node)) - else: - raise ValueError(f"Unexpected annotation node: {node}") + # Handler functions def _handle_Parameters( self, @@ -645,7 +522,7 @@ class TypeCollector(m.MatcherDecoratableVisitor): annotation = parameter.annotation if annotation is not None: parameter = parameter.with_changes( - annotation=self._handle_Annotation(annotation=annotation) + annotation=annotation.visit(_TypeCollectorDequalifier(self)) ) updated_parameters.append(parameter) return updated_parameters @@ -653,6 +530,68 @@ class TypeCollector(m.MatcherDecoratableVisitor): return parameters.with_changes(params=update_annotations(parameters.params)) +class _TypeCollectorDequalifier(cst.CSTTransformer): + def __init__(self, type_collector: "TypeCollector") -> None: + self.type_collector = type_collector + + def leave_Name(self, original_node: cst.Name, updated_node: cst.Name) -> cst.Name: + qualified_name = _get_unique_qualified_name(self.type_collector, original_node) + should_qualify = self.type_collector._handle_qualification_and_should_qualify( + qualified_name, original_node + ) + self.type_collector.annotations.names.add(qualified_name) + if should_qualify: + qualified_node = cst.parse_module(qualified_name) + return qualified_node # pyre-ignore[7] + else: + return original_node + + def visit_Attribute(self, node: cst.Attribute) -> bool: + return False + + def leave_Attribute( + self, original_node: cst.Attribute, updated_node: cst.Attribute + ) -> cst.BaseExpression: + qualified_name = _get_unique_qualified_name(self.type_collector, original_node) + should_qualify = self.type_collector._handle_qualification_and_should_qualify( + qualified_name, original_node + ) + self.type_collector.annotations.names.add(qualified_name) + if should_qualify: + return original_node + else: + return original_node.attr + + def leave_Index( + self, original_node: cst.Index, updated_node: cst.Index + ) -> cst.Index: + if isinstance(original_node.value, cst.SimpleString): + self.type_collector.annotations.names.add( + _get_string_value(original_node.value) + ) + return updated_node + + def visit_Subscript(self, node: cst.Subscript) -> bool: + return _get_unique_qualified_name(self.type_collector, node) not in ( + "Type", + "typing.Type", + ) + + def leave_Subscript( + self, original_node: cst.Subscript, updated_node: cst.Subscript + ) -> cst.Subscript: + if 
_get_unique_qualified_name(self.type_collector, original_node) in ( + "Type", + "typing.Type", + ): + # Note: we are intentionally not handling qualification of + # anything inside `Type` because it's common to have nested + # classes, which we cannot currently distinguish from classes + # coming from other modules, appear here. + return original_node.with_changes(value=original_node.value.visit(self)) + return updated_node + + @dataclass class AnnotationCounts: global_annotations: int = 0 diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index d237709a..3ef92aed 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -469,7 +469,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): ), "deeply_nested_example_with_multiline_annotation": ( """ - def foo(x: int)-> Union[ + def foo(x: int) -> Union[ Coroutine[Any, Any, django.http.response.HttpResponse], str ]: ... @@ -1119,6 +1119,83 @@ class TestApplyAnnotationsVisitor(CodemodTest): overwrite_existing_annotations=True, ) + @data_provider( + { + "pep_604": ( + """ + def f(a: int | str, b: int | list[int | list[int | str]]) -> str: ... + """, + """ + def f(a, b): + return 'hello' + """, + """ + def f(a: int | str, b: int | list[int | list[int | str]]) -> str: + return 'hello' + """, + ), + "pep_604_import": ( + """ + from typing import Callable + from collections.abc import Sequence + def f(a: int | str, b: int | list[int | Callable[[str], Sequence]]) -> str: ... + """, + """ + def f(a, b): + return 'hello' + """, + """ + from collections.abc import Sequence + from typing import Callable + + def f(a: int | str, b: int | list[int | Callable[[str], Sequence]]) -> str: + return 'hello' + """, + ), + } + ) + def test_annotate_functions_pep_604( + self, stub: str, before: str, after: str + ) -> None: + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, + overwrite_existing_annotations=True, + ) + + @data_provider( + { + "import_inside_list": ( + """ + from typing import Callable + from collections.abc import Sequence + def f(a: Callable[[Sequence[int]], int], b: int) -> str: ... + """, + """ + def f(a, b): + return 'hello' + """, + """ + from collections.abc import Sequence + from typing import Callable + + def f(a: Callable[[Sequence[int]], int], b: int) -> str: + return 'hello' + """, + ), + } + ) + def test_annotate_function_nested_imports( + self, stub: str, before: str, after: str + ) -> None: + self.run_test_case_with_flags( + stub=stub, + before=before, + after=after, + overwrite_existing_annotations=True, + ) + @data_provider( { "return_self": ( From f1b973f6b3b45a8fbd34dd15081b742c42acc103 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Tue, 25 Apr 2023 02:45:51 -0600 Subject: [PATCH 361/632] Fix pyre setup link in metadata.rst (#913) --- docs/source/metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index e568031e..b0b12800 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -242,7 +242,7 @@ In Python, type checkers like `Mypy `_ or `Pyre `__ analyze `type annotations `__ and infer types for expressions. :class:`~libcst.metadata.TypeInferenceProvider` is provided by `Pyre Query API `__ -which requires `setup watchman `_ for incremental typechecking. +which requires `setup watchman `_ for incremental typechecking. 
:class:`~libcst.metadata.FullRepoManger` is built for manage the inter process communication to Pyre. .. autoclass:: libcst.metadata.TypeInferenceProvider From 1889bca0e606eb152061066adaaf493ac0b45567 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Apr 2023 09:46:17 +0100 Subject: [PATCH 362/632] Bump pypa/cibuildwheel from 2.12.1 to 2.12.3 (#915) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.12.1 to 2.12.3. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.12.1...v2.12.3) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c86e54c5..65e16f80 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.12.1 + uses: pypa/cibuildwheel@v2.12.3 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From fbfb83d3c6495face74e5d083fd22141b4147c2f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Apr 2023 09:46:32 +0100 Subject: [PATCH 363/632] Bump usort from 1.0.5 to 1.0.6 (#905) Bumps [usort](https://github.com/facebook/usort) from 1.0.5 to 1.0.6. - [Release notes](https://github.com/facebook/usort/releases) - [Changelog](https://github.com/facebook/usort/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/usort/compare/v1.0.5...v1.0.6) --- updated-dependencies: - dependency-name: usort dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index cb9cc8e1..1675dd38 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ dev = [ "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", "ufmt==2.0.1", - "usort==1.0.5", + "usort==1.0.6", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", "jinja2==3.1.2", From 889ce56b0fcb9724ef980c7f2305d04f6bb7cd34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Apr 2023 10:00:48 +0100 Subject: [PATCH 364/632] Bump ufmt from 2.0.1 to 2.1.0 (#904) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.0.1 to 2.1.0. - [Release notes](https://github.com/omnilib/ufmt/releases) - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.0.1...v2.1.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 1675dd38..8775c21a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ dev = [ "pyre-check==0.9.10; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.0.1", + "ufmt==2.1.0", "usort==1.0.6", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From ea19578293f8bc20d51d3b0b3f51c7771ed52944 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 17 May 2023 16:49:40 +0200 Subject: [PATCH 365/632] Fix crash on escaped backslashes in rf-string (#921) --- native/libcst/src/tokenizer/core/mod.rs | 4 ++-- native/libcst/src/tokenizer/tests.rs | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 89877f7e..7c0f0788 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -973,8 +973,8 @@ impl<'t> TokState<'t> { } (Some('\\'), _) if is_raw_string => { self.text_pos.next(); - if let Some('"' | '\'') = self.text_pos.peek() { - // these aren't end of string markers, skip them + // skip escaped end-of-string marker or backslash + if let Some('"' | '\'' | '\\') = self.text_pos.peek() { self.text_pos.next(); } } diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 69deaaf3..4e8ce4d3 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -529,6 +529,10 @@ fn test_string_prefix() { tokenize_all(r#"r"\"""#, &default_config()), Ok(vec![(TokType::String, r#"r"\"""#)]), ); + assert_eq!( + tokenize_all(r#"r'\\'"#, &default_config()), + Ok(vec![(TokType::String, r#"r'\\'"#)]), + ); let config = TokConfig { split_fstring: true, ..default_config() @@ -549,6 +553,14 @@ fn test_string_prefix() { (TokType::FStringEnd, "\""), ]), ); + assert_eq!( + tokenize_all(r#"rf'\\'"#, &config), + Ok(vec![ + (TokType::FStringStart, "rf'"), + (TokType::FStringString, r#"\\"#), + (TokType::FStringEnd, "'"), + ]), + ); } #[test] From 38b708b5ed524349ab3ca78f533c20d7bbaa7859 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Tue, 23 May 2023 10:56:49 +0200 Subject: [PATCH 366/632] relax validation rules on decorators (#926) * relax validation on decorators * allow any expression --------- Co-authored-by: Zsolt Dollenstein --- libcst/_nodes/statement.py | 16 +-------- libcst/_nodes/tests/test_funcdef.py | 56 ++++++++++++++++++++--------- libcst/matchers/__init__.py | 13 ++----- 3 files changed, 44 insertions(+), 41 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index ee440573..3455b47a 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -21,7 +21,6 @@ from libcst._nodes.expression import ( BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, - Call, ConcatenatedString, ExpressionPosition, From, @@ -1619,7 +1618,7 @@ class Decorator(CSTNode): #: The decorator that will return a new function wrapping the parent #: of this decorator. - decorator: Union[Name, Attribute, Call] + decorator: BaseExpression #: Line comments and empty lines before this decorator. 
The parent #: :class:`FunctionDef` or :class:`ClassDef` node owns leading lines before @@ -1632,19 +1631,6 @@ class Decorator(CSTNode): #: Optional trailing comment and newline following the decorator before the next line. trailing_whitespace: TrailingWhitespace = TrailingWhitespace.field() - def _validate(self) -> None: - decorator = self.decorator - if len(decorator.lpar) > 0 or len(decorator.rpar) > 0: - raise CSTValidationError( - "Cannot have parens around decorator in a Decorator." - ) - if isinstance(decorator, Call) and not isinstance( - decorator.func, (Name, Attribute) - ): - raise CSTValidationError( - "Decorator call function must be Name or Attribute node." - ) - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Decorator": return Decorator( leading_lines=visit_sequence( diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 4711aac5..250030c4 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -623,6 +623,46 @@ class FunctionDefCreationTest(CSTNodeTest): "code": "@ bar ( )\n", "expected_position": CodeRange((1, 0), (1, 10)), }, + # Allow nested calls on decorator + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + (cst.Decorator(cst.Call(func=cst.Call(func=cst.Name("bar")))),), + ), + "code": "@bar()()\ndef foo(): pass\n", + }, + # Allow any expression in decorator + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + ( + cst.Decorator( + cst.BinaryOperation(cst.Name("a"), cst.Add(), cst.Name("b")) + ), + ), + ), + "code": "@a + b\ndef foo(): pass\n", + }, + # Allow parentheses around decorator + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + ( + cst.Decorator( + cst.Name( + "bar", lpar=(cst.LeftParen(),), rpar=(cst.RightParen(),) + ) + ), + ), + ), + "code": "@(bar)\ndef foo(): pass\n", + }, # Parameters { "node": cst.Parameters( @@ -922,22 +962,6 @@ class FunctionDefCreationTest(CSTNodeTest): ), r"Expecting a star prefix of '\*\*'", ), - # Validate decorator name semantics - ( - lambda: cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - ( - cst.Decorator( - cst.Name( - "bar", lpar=(cst.LeftParen(),), rpar=(cst.RightParen(),) - ) - ), - ), - ), - "Cannot have parens around decorator in a Decorator", - ), ) ) def test_invalid( diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index f28de9c7..1fc23557 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -3089,13 +3089,6 @@ class Continue(BaseSmallStatement, BaseMatcherNode): ] = DoNotCare() -NameOrAttributeOrCallMatchType = Union[ - "Name", - "Attribute", - "Call", - MetadataMatchType, - MatchIfTrue[Union[cst.Name, cst.Attribute, cst.Call]], -] TrailingWhitespaceMatchType = Union[ "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] ] @@ -3104,10 +3097,10 @@ TrailingWhitespaceMatchType = Union[ @dataclass(frozen=True, eq=False, unsafe_hash=False) class Decorator(BaseMatcherNode): decorator: Union[ - NameOrAttributeOrCallMatchType, + BaseExpressionMatchType, DoNotCareSentinel, - OneOf[NameOrAttributeOrCallMatchType], - AllOf[NameOrAttributeOrCallMatchType], + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], ] = DoNotCare() leading_lines: Union[ Sequence[ From 654b14f39cb405c7508eccf54823d681dd71c89d Mon Sep 17 
00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 23 May 2023 12:32:16 +0100 Subject: [PATCH 367/632] CI: bump macos version (#927) * CI: bump macos version * use macos-latest --- .github/build-matrix.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/build-matrix.json b/.github/build-matrix.json index 5a611b04..3a1db7b3 100644 --- a/.github/build-matrix.json +++ b/.github/build-matrix.json @@ -9,11 +9,11 @@ }, { "vers": "arm64", - "os": "macos-10.15" + "os": "macos-latest" }, { "vers": "auto64", - "os": "macos-10.15" + "os": "macos-latest" }, { "vers": "auto64", From 858dd3d9a91ee1c52906fe487829b8410a736223 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 23 May 2023 14:37:44 +0100 Subject: [PATCH 368/632] CI: add `build` dev dependency This should fix the release pipeline which calls `python -m build` --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 8775c21a..76bf6765 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,6 +28,7 @@ dependencies = [ dev = [ "black==23.1.0", "coverage>=4.5.4", + "build>=0.10.0", "fixit==0.1.1", "flake8>=3.7.8,<5", "Sphinx>=5.1.1", From f6d87cd96861ed4299edf6b995c1580bf538fc40 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 23 May 2023 15:19:45 +0100 Subject: [PATCH 369/632] update changelog --- CHANGELOG.md | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a6dfa5c..7438a9f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,69 @@ +# 0.4.10 - 2023-05-23 + +## New Contributors +* @and-semakin made their first contribution in https://github.com/Instagram/LibCST/pull/816 +* @carljm made their first contribution in https://github.com/Instagram/LibCST/pull/828 +* @sagarbadiyani made their first contribution in https://github.com/Instagram/LibCST/pull/841 +* @podtserkovskiy made their first contribution in https://github.com/Instagram/LibCST/pull/894 +* @rchen152 made their first contribution in https://github.com/Instagram/LibCST/pull/903 +* @Kludex made their first contribution in https://github.com/Instagram/LibCST/pull/913 +* @jakkdl made their first contribution in https://github.com/Instagram/LibCST/pull/921 + +## Added +* Add py3.11 classifier by @and-semakin in https://github.com/Instagram/LibCST/pull/816 +* Script to regenerate test fixtures, upgrade to Pyre 0.9.10 by @amyreese in https://github.com/Instagram/LibCST/pull/872 +* Allow FullyQualifiedNameProvider to work with absolute paths by @amyreese in https://github.com/Instagram/LibCST/pull/867 +* Allow running codemods without configuring in YAML by @akx in https://github.com/Instagram/LibCST/pull/879 +* Support PEP 604 in ApplyTypeAnnotationsVisitor by @hauntsaninja in https://github.com/Instagram/LibCST/pull/868 + +## Fixed +* fix PEP 604 union annotations in decorators by @carljm in https://github.com/Instagram/LibCST/pull/828 +* [AddImportsVisitor] Docstring Check Only for the Top Element of the Body by @sagarbadiyani in https://github.com/Instagram/LibCST/pull/841 +* Fix [#855](https://github.com/Instagram/LibCST/issues/855) - fail to parse with statement by @stroxler in https://github.com/Instagram/LibCST/pull/861 +* Add setuptools-rust to build requirements in setup.py by @amyreese in https://github.com/Instagram/LibCST/pull/873 +* Relative imports from '' package are not allowed by @podtserkovskiy in https://github.com/Instagram/LibCST/pull/894 +* Use subprocess.DEVNULL instead of opening 
os.devnull by hand by @akx in https://github.com/Instagram/LibCST/pull/897 +* Ensure current Python interpreter is used for subprocesses by @akx in https://github.com/Instagram/LibCST/pull/898 +* Fix ApplyTypeAnnotationsVisitor behavior on attribute assignments. by @rchen152 in https://github.com/Instagram/LibCST/pull/903 +* Fix spelling and grammar in some comments by @stroxler in https://github.com/Instagram/LibCST/pull/908 +* skip escaped backslash in rf-string by @jakkdl in https://github.com/Instagram/LibCST/pull/921 +* relax validation rules on decorators by @jakkdl in https://github.com/Instagram/LibCST/pull/926 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v0.4.9...v0.4.10 + +# 0.4.9 - 2022-11-10 + +## Updated +* Bump setuptools-rust version by @zsol in https://github.com/Instagram/LibCST/pull/809 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v0.4.8...v0.4.9 + +# 0.4.8 - 2022-11-10 + +## New Contributors +* @dhruvmanila made their first contribution in https://github.com/Instagram/LibCST/pull/728 +* @vfazio made their first contribution in https://github.com/Instagram/LibCST/pull/801 +* @matthewshaer made their first contribution in https://github.com/Instagram/LibCST/pull/807 + + +## Fixed +* Fix parse error message for number parsing by @zzl0 in https://github.com/Instagram/LibCST/pull/724 +* Fix problematic doc build, due to the new builder image provided by readthedocs doesn't has the `graphviz-dev` package pre-installed any more by @MapleCCC in https://github.com/Instagram/LibCST/pull/751 +* Fix docstring of `FullRepoManager` by @MapleCCC in https://github.com/Instagram/LibCST/pull/750 +* Fix bug when `TypeOf` is one of options in `OneOf` / `AllOf` by @MapleCCC in https://github.com/Instagram/LibCST/pull/756 +* Tighten the metadata type of `ExpressionContextProvider` by @MapleCCC in https://github.com/Instagram/LibCST/pull/760 +* Fix the bug that the use of formatter in codemods has undetermined target Python version, resulting in hard-to-reason-with behavior by @MapleCCC in https://github.com/Instagram/LibCST/pull/771 + + +## Added +* Python 3.11 rutime support + * test using python 3.11 beta versions by @zsol in https://github.com/Instagram/LibCST/pull/723 + * Python 3.11 wheels by @vfazio in https://github.com/Instagram/LibCST/pull/801 +* Raise informative exception when metadata is unresolved in a metadata-based match by @MapleCCC in https://github.com/Instagram/LibCST/pull/757 +* Add AccessorProvider by @matthewshaer in https://github.com/Instagram/LibCST/pull/807 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v0.4.7...v0.4.8 + # 0.4.7 - 2022-07-12 ## New Contributors From bd96010782d0db3ba1cec98f298108c42ef62920 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 24 May 2023 20:36:31 +0100 Subject: [PATCH 370/632] exclude native/target directory from sdist (#928) --- MANIFEST.in | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 17070b66..8fd03bd0 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,3 +2,4 @@ include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md docs/source/*.rst include native/Cargo.toml recursive-include native * +recursive-exclude native/target * \ No newline at end of file diff --git a/setup.py b/setup.py index c81bbce0..12180cbc 100644 --- a/setup.py +++ b/setup.py @@ -8,6 +8,7 @@ from os import environ import setuptools from setuptools_rust import Binding, RustExtension + def no_local_scheme(version: str) -> str: return "" From 
8216b8add22426bef90503bea2088f28cab5084b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 May 2023 21:13:56 +0100 Subject: [PATCH 371/632] Bump Swatinem/rust-cache from 1.3.0 to 2.4.0 (#925) Bumps [Swatinem/rust-cache](https://github.com/Swatinem/rust-cache) from 1.3.0 to 2.4.0. - [Release notes](https://github.com/Swatinem/rust-cache/releases) - [Changelog](https://github.com/Swatinem/rust-cache/blob/master/CHANGELOG.md) - [Commits](https://github.com/Swatinem/rust-cache/compare/v1.3.0...v2.4.0) --- updated-dependencies: - dependency-name: Swatinem/rust-cache dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 65e16f80..867ae081 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -50,7 +50,7 @@ jobs: python-version: "3.10" - name: Rust Cache if: ${{ !contains(matrix.os, 'self-hosted') }} - uses: Swatinem/rust-cache@v1.3.0 + uses: Swatinem/rust-cache@v2.4.0 with: working-directory: native - name: Disable scmtools local scheme diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8dcc1cc..ab358185 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,7 +35,7 @@ jobs: with: toolchain: stable - name: Rust Cache - uses: Swatinem/rust-cache@v1.3.0 + uses: Swatinem/rust-cache@v2.4.0 with: working-directory: native - name: Build LibCST @@ -146,7 +146,7 @@ jobs: toolchain: stable components: rustfmt, clippy - name: Rust Cache - uses: Swatinem/rust-cache@v1.3.0 + uses: Swatinem/rust-cache@v2.4.0 with: working-directory: native - uses: actions/setup-python@v4 From 193fab43571e9f1464baf72e1ec74fca570f6810 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 25 May 2023 18:24:59 +0100 Subject: [PATCH 372/632] Switch default parser implementation to native (#929) The old parser is now only available using LIBCST_PARSER_TYPE=pure --- .github/workflows/ci.yml | 6 +++--- CHANGELOG.md | 11 +++++++++++ libcst/_parser/entrypoints.py | 2 +- native/libcst/README.md | 5 ++--- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ab358185..aad99e79 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,11 +40,11 @@ jobs: working-directory: native - name: Build LibCST run: hatch env create - - name: Pure Tests + - name: Tests run: hatch run test - - name: Native Tests + - name: Pure Parser Tests env: - LIBCST_PARSER_TYPE: ${{ matrix.parser }} + LIBCST_PARSER_TYPE: pure run: hatch run test # Run linters diff --git a/CHANGELOG.md b/CHANGELOG.md index 7438a9f3..aca687c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +# 1.0.0 - 2023-05-25 + +The first major release of LibCST is essentially the same as 0.4.10, but using the +newer, Rust-based parser implementation by default. The old, pure Python parser is +scheduled for removal in the next (non-patch) release. Until then, it is available with +the `LIBCST_PARSER_TYPE` environment variable set to `pure`. 
+ +## Updated + +* Switch the default parser implementation to native by @zsol in https://github.com/Instagram/LibCST/pull/929 + # 0.4.10 - 2023-05-23 ## New Contributors diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index e47d9fc6..d9cee5e9 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -27,7 +27,7 @@ _DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig() def is_native() -> bool: typ = os.environ.get("LIBCST_PARSER_TYPE") - return typ == "native" + return typ != "pure" def _parse( diff --git a/native/libcst/README.md b/native/libcst/README.md index 2af0f1f0..09fe8f99 100644 --- a/native/libcst/README.md +++ b/native/libcst/README.md @@ -3,9 +3,8 @@ A native extension to enable parsing of new Python grammar in LibCST. The extension is written in Rust, and exposed to Python using [PyO3](https://pyo3.rs/). -This is packaged together with libcst, and can be imported from `libcst.native`. When -the `LIBCST_PARSER_TYPE` environment variable is set to `native`, the LibCST APIs use -this module for all parsing. +This is packaged together with libcst, and can be imported from `libcst.native`. By default +the LibCST APIs use this module for all parsing. Later on, the parser library might be packaged separately as [a Rust crate](https://crates.io). Pull requests towards this are much appreciated. From a594fe1dd2a2843034d79a7a3edf37c69f9cbd5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sigurd=20Lj=C3=B8dal?= <544451+ljodal@users.noreply.github.com> Date: Fri, 26 May 2023 14:43:05 +0200 Subject: [PATCH 373/632] Fix type of `evaluated_value` on string to allow bytes (#721) * Fix type of evaluated_value on string This can return bytes if the string is a bytestring, e.g.: In [1]: import libcst as cst In [2]: cst.parse_expression('b"foo"').evaluated_value Out[2]: b'foo' * Fix type errors from changed signature --- libcst/_nodes/expression.py | 10 +++++++--- libcst/_nodes/statement.py | 2 ++ libcst/codegen/gen_matcher_classes.py | 4 +++- libcst/codemod/commands/strip_strings_from_types.py | 10 +++++++--- libcst/codemod/visitors/_gather_exports.py | 2 +- 5 files changed, 20 insertions(+), 8 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 54202347..be0589bb 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -699,7 +699,7 @@ class SimpleString(_BasePrefixedString): state.add_token(self.value) @property - def evaluated_value(self) -> str: + def evaluated_value(self) -> Union[str, bytes]: """ Return an :func:`ast.literal_eval` evaluated str of :py:attr:`value`. 
""" @@ -1034,7 +1034,7 @@ class ConcatenatedString(BaseString): self.right._codegen(state) @property - def evaluated_value(self) -> Optional[str]: + def evaluated_value(self) -> Union[str, bytes, None]: """ Return an :func:`ast.literal_eval` evaluated str of recursively concatenated :py:attr:`left` and :py:attr:`right` if and only if both :py:attr:`left` and :py:attr:`right` are composed by :class:`SimpleString` or :class:`ConcatenatedString` @@ -1048,7 +1048,11 @@ class ConcatenatedString(BaseString): right_val = right.evaluated_value if right_val is None: return None - return left_val + right_val + if isinstance(left_val, bytes) and isinstance(right_val, bytes): + return left_val + right_val + if isinstance(left_val, str) and isinstance(right_val, str): + return left_val + right_val + return None @add_slots diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 3455b47a..8cd171e0 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -1686,6 +1686,8 @@ def get_docstring_impl( evaluated_value = val.evaluated_value else: return None + if isinstance(evaluated_value, bytes): + return None if evaluated_value is not None and clean: return inspect.cleandoc(evaluated_value) diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 776db80b..8ac8a466 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -265,7 +265,9 @@ def _get_raw_name(node: cst.CSTNode) -> Optional[str]: if isinstance(node, cst.Name): return node.value elif isinstance(node, cst.SimpleString): - return node.evaluated_value + evaluated_value = node.evaluated_value + if isinstance(evaluated_value, str): + return evaluated_value elif isinstance(node, cst.SubscriptElement): return _get_raw_name(node.slice) elif isinstance(node, cst.Index): diff --git a/libcst/codemod/commands/strip_strings_from_types.py b/libcst/codemod/commands/strip_strings_from_types.py index 871d2513..3f0894cd 100644 --- a/libcst/codemod/commands/strip_strings_from_types.py +++ b/libcst/codemod/commands/strip_strings_from_types.py @@ -43,8 +43,12 @@ class StripStringsCommand(VisitorBasedCodemodCommand): self, original_node: libcst.SimpleString, updated_node: libcst.SimpleString ) -> Union[libcst.SimpleString, libcst.BaseExpression]: AddImportsVisitor.add_needed_import(self.context, "__future__", "annotations") + evaluated_value = updated_node.evaluated_value # Just use LibCST to evaluate the expression itself, and insert that as the # annotation. 
- return parse_expression( - updated_node.evaluated_value, config=self.module.config_for_parsing - ) + if isinstance(evaluated_value, str): + return parse_expression( + evaluated_value, config=self.module.config_for_parsing + ) + else: + return updated_node diff --git a/libcst/codemod/visitors/_gather_exports.py b/libcst/codemod/visitors/_gather_exports.py index a35b389a..bb1c8894 100644 --- a/libcst/codemod/visitors/_gather_exports.py +++ b/libcst/codemod/visitors/_gather_exports.py @@ -140,6 +140,6 @@ class GatherExportsVisitor(ContextAwareVisitor): ) -> None: if self._in_assigned_export: name = node.evaluated_value - if name is None: + if not isinstance(name, str): return self.explicit_exported_objects.add(name) From ee80bf20e9b9b6ad16f8b9e6d4d662eb28ffcdef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 May 2023 13:44:11 +0100 Subject: [PATCH 374/632] Update maturin requirement from <0.14,>=0.8.3 to >=0.8.3,<0.16 (#920) Updates the requirements on [maturin](https://github.com/pyo3/maturin) to permit the latest version. - [Release notes](https://github.com/pyo3/maturin/releases) - [Changelog](https://github.com/PyO3/maturin/blob/main/Changelog.md) - [Commits](https://github.com/pyo3/maturin/compare/v0.8.3...v0.15.1) --- updated-dependencies: - dependency-name: maturin dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 76bf6765..3848947d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ dev = [ "hypothesis>=4.36.0", "hypothesmith>=0.0.4", "jupyter>=1.0.0", - "maturin>=0.8.3,<0.14", + "maturin>=0.8.3,<0.16", "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", "pyre-check==0.9.10; platform_system != 'Windows'", From 59aeceb17e39d9eb6158036f6e7ef7d4aab78115 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 May 2023 13:44:46 +0100 Subject: [PATCH 375/632] Bump black from 23.1.0 to 23.3.0 (#918) Bumps [black](https://github.com/psf/black) from 23.1.0 to 23.3.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.1.0...23.3.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3848947d..275c9c8e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ [project.optional-dependencies] dev = [ - "black==23.1.0", + "black==23.3.0", "coverage>=4.5.4", "build>=0.10.0", "fixit==0.1.1", From de57f7cc639297263e78c70fd4e1e9cff555877c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Jun 2023 07:25:40 +0200 Subject: [PATCH 376/632] Bump pypa/cibuildwheel from 2.12.3 to 2.13.0 (#942) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.12.3 to 2.13.0. 
- [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.12.3...v2.13.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 867ae081..60a0e16b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.12.3 + uses: pypa/cibuildwheel@v2.13.0 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From 0f78b810a4c87c40b5bf10d206ef6ca6dbad780e Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 7 Jun 2023 13:22:54 +0200 Subject: [PATCH 377/632] remove quotes around charset in .editorconfig (#949) --- .editorconfig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index c4f3c65d..9f02a19e 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,7 +1,7 @@ root = true [*.{py,pyi,rs,toml,md}] -charset = "utf-8" +charset = utf-8 end_of_line = lf indent_size = 4 indent_style = space From 062bcdb07e56d32061f515f51178705206b8c237 Mon Sep 17 00:00:00 2001 From: Sergii Dymchenko Date: Wed, 7 Jun 2023 04:23:12 -0700 Subject: [PATCH 378/632] Fix Sentinal typo (#948) --- libcst/_nodes/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 4fe5edbf..d9689f8f 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -383,7 +383,7 @@ class CSTNode(ABC): new_tree = self.visit(_ChildReplacementTransformer(old_node, new_node)) if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): # The above transform never returns *Sentinel, so this isn't possible - raise Exception("Logic error, cannot get a *Sentinal here!") + raise Exception("Logic error, cannot get a *Sentinel here!") return new_tree def deep_remove( From 648e1616bebf2c6477ca6c09ec25d3d7dad79fd9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Jun 2023 12:23:25 +0100 Subject: [PATCH 379/632] Bump usort from 1.0.6 to 1.0.7 (#946) Bumps [usort](https://github.com/facebook/usort) from 1.0.6 to 1.0.7. - [Changelog](https://github.com/facebook/usort/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/usort/compare/v1.0.6...v1.0.7) --- updated-dependencies: - dependency-name: usort dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 275c9c8e..59ce2602 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ dev = [ "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", "ufmt==2.1.0", - "usort==1.0.6", + "usort==1.0.7", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", "jinja2==3.1.2", From 2acc2933473da4a73b1c06be76fb9b56d15c7389 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 7 Jun 2023 13:37:16 +0200 Subject: [PATCH 380/632] Fix whitespace, fstring, walrus related parse errors (#939, #938, #937, #936, #935, #934, #933, #932, #931) * Allow walrus in slices See https://github.com/python/cpython/pull/23317 Raised in #930. * Fix parsing of nested f-string specifiers For an expression like `f"{one:{two:}{three}}"`, `three` is not in an f-string spec, and should be tokenized accordingly. This PR fixes the `format_spec_count` bookkeeping in the tokenizer, so it properly decrements it when a closing `}` is encountered but only if the `}` closes a format_spec. Reported in #930. * Fix tokenizing `0else` This is an obscure one. `_ if 0else _` failed to parse with some very weird errors. It turns out that the tokenizer tries to parse `0else` as a single number, but when it encounters `l` it realizes it can't be a single number and it backtracks. Unfortunately the backtracking logic was broken, and it failed to correctly backtrack one of the offsets used for whitespace parsing (the byte offset since the start of the line). This caused whitespace nodes to refer to incorrect parts of the input text, eventually resulting in the above behavior. This PR fixes the bookkeeping when the tokenizer backtracks. Reported in #930. * Allow no whitespace between lambda keyword and params in certain cases Python accepts code where `lambda` follows a `*`, so this PR relaxes validation rules for Lambdas. Raised in #930. * Allow any expression in comprehensions' evaluated expression This PR relaxes the accepted types for the `elt` field in `ListComp`, `SetComp`, and `GenExp`, as well as the `key` and `value` fields in `DictComp`. Fixes #500. * Allow no space around an ifexp in certain cases For example in `_ if _ else""if _ else _`. Raised in #930. Also fixes #854. * Allow no spaces after `as` in a contextmanager in certain cases Like in `with foo()as():pass` Raised in #930. * Allow no spaces around walrus in certain cases Like in `[_:=''for _ in _]` Raised in #930. * Allow no whitespace after lambda body in certain cases Like in `[lambda:()for _ in _]` Reported in #930. 
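As a quick way to see the combined effect of these fixes, the snippets called out above can be round-tripped through the parser. This is a minimal sketch, not part of the patch itself; it assumes a LibCST build that already contains these changes, and the snippet list and assertion are illustrative only (they mirror the examples in this message and in the added tests):

```python
# Sanity-check sketch: each snippet below was previously rejected by LibCST's
# parser and, with this patch applied, should parse and round-trip exactly.
import libcst as cst

snippets = [
    'f"{one:{two:}{three}}"',    # nested f-string format specifiers
    "_ if 0else _",              # `0else` tokenizer backtracking
    "lambda*args: 5",            # no space between `lambda` and `*`
    "lambda**kwargs: 5",         # ... or between `lambda` and `**`
    "[_:=''for _ in _]",         # walrus with no surrounding whitespace
    "[lambda:()for _ in _]",     # no whitespace after a lambda body
    "_ if _ else''if _ else _",  # no space around a nested conditional
    "with foo()as():pass",       # no spaces around `as` in a with statement
]

for src in snippets:
    module = cst.parse_module(src)
    # LibCST's round-trip guarantee means the generated code must match the
    # input exactly, including the unusual whitespace exercised here.
    assert module.code == src, (src, module.code)
```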
--- libcst/_nodes/expression.py | 54 +- libcst/_nodes/statement.py | 5 +- libcst/_nodes/tests/test_dict_comp.py | 11 + libcst/_nodes/tests/test_ifexp.py | 35 + libcst/_nodes/tests/test_lambda.py | 71 +- libcst/_nodes/tests/test_namedexpr.py | 16 + libcst/_nodes/tests/test_simple_comp.py | 27 + libcst/_nodes/tests/test_with.py | 17 + libcst/_typed_visitor.py | 14324 +++---- libcst/matchers/__init__.py | 31854 ++++++++-------- libcst/matchers/_return_types.py | 726 +- native/libcst/src/parser/grammar.rs | 2 +- native/libcst/src/tokenizer/core/mod.rs | 4 - .../libcst/src/tokenizer/core/string_types.rs | 7 +- native/libcst/src/tokenizer/tests.rs | 26 + .../libcst/src/tokenizer/text_position/mod.rs | 5 + native/libcst/tests/fixtures/expr.py | 1 + native/libcst/tests/fixtures/super_strings.py | 2 + native/libcst/tests/fixtures/wonky_walrus.py | 4 +- 19 files changed, 23698 insertions(+), 23493 deletions(-) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index be0589bb..074fc71f 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -1983,6 +1983,25 @@ class Parameters(CSTNode): star_kwarg=visit_optional(self, "star_kwarg", self.star_kwarg, visitor), ) + def _safe_to_join_with_lambda(self) -> bool: + """ + Determine if Parameters need a space after the `lambda` keyword. Returns True + iff it's safe to omit the space between `lambda` and these Parameters. + + See also `BaseExpression._safe_to_use_with_word_operator`. + + For example: `lambda*_: pass` + """ + if len(self.posonly_params) != 0: + return False + + # posonly_ind can't appear if above condition is false + + if len(self.params) > 0 and self.params[0].star not in {"*", "**"}: + return False + + return True + def _codegen_impl(self, state: CodegenState) -> None: # noqa: C901 # Compute the star existence first so we can ask about whether # each element is the last in the list or not. @@ -2088,6 +2107,13 @@ class Lambda(BaseExpression): BaseParenthesizableWhitespace, MaybeSentinel ] = MaybeSentinel.DEFAULT + def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: + if position == ExpressionPosition.LEFT: + return len(self.rpar) > 0 or self.body._safe_to_use_with_word_operator( + position + ) + return super()._safe_to_use_with_word_operator(position) + def _validate(self) -> None: # Validate parents super(Lambda, self)._validate() @@ -2115,6 +2141,7 @@ class Lambda(BaseExpression): if ( isinstance(whitespace_after_lambda, BaseParenthesizableWhitespace) and whitespace_after_lambda.empty + and not self.params._safe_to_join_with_lambda() ): raise CSTValidationError( "Must have at least one space after lambda when specifying params" @@ -2492,6 +2519,12 @@ class IfExp(BaseExpression): #: Whitespace after the ``else`` keyword, but before the ``orelse`` expression. whitespace_after_else: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") + def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: + if position == ExpressionPosition.RIGHT: + return self.body._safe_to_use_with_word_operator(position) + else: + return self.orelse._safe_to_use_with_word_operator(position) + def _validate(self) -> None: # Paren validation and such super(IfExp, self)._validate() @@ -3495,7 +3528,7 @@ class BaseSimpleComp(BaseComp, ABC): #: The expression evaluated during each iteration of the comprehension. This #: lexically comes before the ``for_in`` clause, but it is semantically the #: inner-most element, evaluated inside the ``for_in`` clause. 
- elt: BaseAssignTargetExpression + elt: BaseExpression #: The ``for ... in ... if ...`` clause that lexically comes after ``elt``. This may #: be a nested structure for nested comprehensions. See :class:`CompFor` for @@ -3528,7 +3561,7 @@ class GeneratorExp(BaseSimpleComp): """ #: The expression evaluated and yielded during each iteration of the generator. - elt: BaseAssignTargetExpression + elt: BaseExpression #: The ``for ... in ... if ...`` clause that comes after ``elt``. This may be a #: nested structure for nested comprehensions. See :class:`CompFor` for details. @@ -3579,7 +3612,7 @@ class ListComp(BaseList, BaseSimpleComp): """ #: The expression evaluated and stored during each iteration of the comprehension. - elt: BaseAssignTargetExpression + elt: BaseExpression #: The ``for ... in ... if ...`` clause that comes after ``elt``. This may be a #: nested structure for nested comprehensions. See :class:`CompFor` for details. @@ -3621,7 +3654,7 @@ class SetComp(BaseSet, BaseSimpleComp): """ #: The expression evaluated and stored during each iteration of the comprehension. - elt: BaseAssignTargetExpression + elt: BaseExpression #: The ``for ... in ... if ...`` clause that comes after ``elt``. This may be a #: nested structure for nested comprehensions. See :class:`CompFor` for details. @@ -3663,10 +3696,10 @@ class DictComp(BaseDict, BaseComp): """ #: The key inserted into the dictionary during each iteration of the comprehension. - key: BaseAssignTargetExpression + key: BaseExpression #: The value associated with the ``key`` inserted into the dictionary during each #: iteration of the comprehension. - value: BaseAssignTargetExpression + value: BaseExpression #: The ``for ... in ... if ...`` clause that lexically comes after ``key`` and #: ``value``. This may be a nested structure for nested comprehensions. See @@ -3770,6 +3803,15 @@ class NamedExpr(BaseExpression): rpar=visit_sequence(self, "rpar", self.rpar, visitor), ) + def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: + if position == ExpressionPosition.LEFT: + return len(self.rpar) > 0 or self.value._safe_to_use_with_word_operator( + position + ) + return len(self.lpar) > 0 or self.target._safe_to_use_with_word_operator( + position + ) + def _codegen_impl(self, state: CodegenState) -> None: with self._parenthesize(state): self.target._codegen(state) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 8cd171e0..de5161fa 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -745,7 +745,10 @@ class AsName(CSTNode): whitespace_after_as: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") def _validate(self) -> None: - if self.whitespace_after_as.empty: + if ( + self.whitespace_after_as.empty + and not self.name._safe_to_use_with_word_operator(ExpressionPosition.RIGHT) + ): raise CSTValidationError( "There must be at least one space between 'as' and name." 
) diff --git a/libcst/_nodes/tests/test_dict_comp.py b/libcst/_nodes/tests/test_dict_comp.py index a9970f9d..a753375f 100644 --- a/libcst/_nodes/tests/test_dict_comp.py +++ b/libcst/_nodes/tests/test_dict_comp.py @@ -26,6 +26,17 @@ class DictCompTest(CSTNodeTest): "parser": parse_expression, "expected_position": CodeRange((1, 0), (1, 17)), }, + # non-trivial keys & values in DictComp + { + "node": cst.DictComp( + cst.BinaryOperation(cst.Name("k1"), cst.Add(), cst.Name("k2")), + cst.BinaryOperation(cst.Name("v1"), cst.Add(), cst.Name("v2")), + cst.CompFor(target=cst.Name("a"), iter=cst.Name("b")), + ), + "code": "{k1 + k2: v1 + v2 for a in b}", + "parser": parse_expression, + "expected_position": CodeRange((1, 0), (1, 29)), + }, # custom whitespace around colon { "node": cst.DictComp( diff --git a/libcst/_nodes/tests/test_ifexp.py b/libcst/_nodes/tests/test_ifexp.py index e00924b1..dd260ef3 100644 --- a/libcst/_nodes/tests/test_ifexp.py +++ b/libcst/_nodes/tests/test_ifexp.py @@ -52,6 +52,41 @@ class IfExpTest(CSTNodeTest): "(foo)if(bar)else(baz)", CodeRange((1, 0), (1, 21)), ), + ( + cst.IfExp( + body=cst.Name("foo"), + whitespace_before_if=cst.SimpleWhitespace(" "), + whitespace_after_if=cst.SimpleWhitespace(" "), + test=cst.Name("bar"), + whitespace_before_else=cst.SimpleWhitespace(" "), + whitespace_after_else=cst.SimpleWhitespace(""), + orelse=cst.IfExp( + body=cst.SimpleString("''"), + whitespace_before_if=cst.SimpleWhitespace(""), + test=cst.Name("bar"), + orelse=cst.Name("baz"), + ), + ), + "foo if bar else''if bar else baz", + CodeRange((1, 0), (1, 32)), + ), + ( + cst.GeneratorExp( + elt=cst.IfExp( + body=cst.Name("foo"), + test=cst.Name("bar"), + orelse=cst.SimpleString("''"), + whitespace_after_else=cst.SimpleWhitespace(""), + ), + for_in=cst.CompFor( + target=cst.Name("_"), + iter=cst.Name("_"), + whitespace_before=cst.SimpleWhitespace(""), + ), + ), + "(foo if bar else''for _ in _)", + CodeRange((1, 1), (1, 28)), + ), # Make sure that spacing works ( cst.IfExp( diff --git a/libcst/_nodes/tests/test_lambda.py b/libcst/_nodes/tests/test_lambda.py index f956ee03..64a561ed 100644 --- a/libcst/_nodes/tests/test_lambda.py +++ b/libcst/_nodes/tests/test_lambda.py @@ -303,30 +303,6 @@ class LambdaCreationTest(CSTNodeTest): ), "at least one space after lambda", ), - ( - lambda: cst.Lambda( - cst.Parameters(star_arg=cst.Param(cst.Name("arg"))), - cst.Integer("5"), - whitespace_after_lambda=cst.SimpleWhitespace(""), - ), - "at least one space after lambda", - ), - ( - lambda: cst.Lambda( - cst.Parameters(kwonly_params=(cst.Param(cst.Name("arg")),)), - cst.Integer("5"), - whitespace_after_lambda=cst.SimpleWhitespace(""), - ), - "at least one space after lambda", - ), - ( - lambda: cst.Lambda( - cst.Parameters(star_kwarg=cst.Param(cst.Name("arg"))), - cst.Integer("5"), - whitespace_after_lambda=cst.SimpleWhitespace(""), - ), - "at least one space after lambda", - ), ( lambda: cst.Lambda( cst.Parameters( @@ -944,6 +920,53 @@ class LambdaParserTest(CSTNodeTest): ), "( lambda : 5 )", ), + # No space between lambda and params + ( + cst.Lambda( + cst.Parameters(star_arg=cst.Param(cst.Name("args"), star="*")), + cst.Integer("5"), + whitespace_after_lambda=cst.SimpleWhitespace(""), + ), + "lambda*args: 5", + ), + ( + cst.Lambda( + cst.Parameters(star_kwarg=cst.Param(cst.Name("kwargs"), star="**")), + cst.Integer("5"), + whitespace_after_lambda=cst.SimpleWhitespace(""), + ), + "lambda**kwargs: 5", + ), + ( + cst.Lambda( + cst.Parameters( + star_arg=cst.ParamStar( + comma=cst.Comma( + 
cst.SimpleWhitespace(""), cst.SimpleWhitespace("") + ) + ), + kwonly_params=[cst.Param(cst.Name("args"), star="")], + ), + cst.Integer("5"), + whitespace_after_lambda=cst.SimpleWhitespace(""), + ), + "lambda*,args: 5", + ), + ( + cst.ListComp( + elt=cst.Lambda( + params=cst.Parameters(), + body=cst.Tuple(()), + colon=cst.Colon(), + ), + for_in=cst.CompFor( + target=cst.Name("_"), + iter=cst.Name("_"), + whitespace_before=cst.SimpleWhitespace(""), + ), + ), + "[lambda:()for _ in _]", + ), ) ) def test_valid( diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index f24045ca..bddd4f3d 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -166,6 +166,22 @@ class NamedExprTest(CSTNodeTest): "parser": _parse_expression_force_38, "expected_position": None, }, + { + "node": cst.ListComp( + elt=cst.NamedExpr( + cst.Name("_"), + cst.SimpleString("''"), + whitespace_after_walrus=cst.SimpleWhitespace(""), + whitespace_before_walrus=cst.SimpleWhitespace(""), + ), + for_in=cst.CompFor( + target=cst.Name("_"), + iter=cst.Name("_"), + whitespace_before=cst.SimpleWhitespace(""), + ), + ), + "code": "[_:=''for _ in _]", + }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_simple_comp.py b/libcst/_nodes/tests/test_simple_comp.py index 4de0c0a9..33ba4164 100644 --- a/libcst/_nodes/tests/test_simple_comp.py +++ b/libcst/_nodes/tests/test_simple_comp.py @@ -41,6 +41,33 @@ class SimpleCompTest(CSTNodeTest): "code": "{a for b in c}", "parser": parse_expression, }, + # non-trivial elt in GeneratorExp + { + "node": cst.GeneratorExp( + cst.BinaryOperation(cst.Name("a1"), cst.Add(), cst.Name("a2")), + cst.CompFor(target=cst.Name("b"), iter=cst.Name("c")), + ), + "code": "(a1 + a2 for b in c)", + "parser": parse_expression, + }, + # non-trivial elt in ListComp + { + "node": cst.ListComp( + cst.BinaryOperation(cst.Name("a1"), cst.Add(), cst.Name("a2")), + cst.CompFor(target=cst.Name("b"), iter=cst.Name("c")), + ), + "code": "[a1 + a2 for b in c]", + "parser": parse_expression, + }, + # non-trivial elt in SetComp + { + "node": cst.SetComp( + cst.BinaryOperation(cst.Name("a1"), cst.Add(), cst.Name("a2")), + cst.CompFor(target=cst.Name("b"), iter=cst.Name("c")), + ), + "code": "{a1 + a2 for b in c}", + "parser": parse_expression, + }, # async GeneratorExp { "node": cst.GeneratorExp( diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index 1310b3f8..517ce357 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -102,6 +102,23 @@ class WithTest(CSTNodeTest): "code": "with context_mgr() as ctx: pass\n", "parser": parse_statement, }, + { + "node": cst.With( + ( + cst.WithItem( + cst.Call(cst.Name("context_mgr")), + cst.AsName( + cst.Tuple(()), + whitespace_after_as=cst.SimpleWhitespace(""), + whitespace_before_as=cst.SimpleWhitespace(""), + ), + ), + ), + cst.SimpleStatementSuite((cst.Pass(),)), + ), + "code": "with context_mgr()as(): pass\n", + "parser": parse_statement, + }, # indentation { "node": DummyIndentedBlock( diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index b63bdeec..a28f3fd1 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -1,7162 +1,7162 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
- - -# This file was generated by libcst.codegen.gen_matcher_classes -from typing import Optional, TYPE_CHECKING, Union - -from libcst._flatten_sentinel import FlattenSentinel -from libcst._maybe_sentinel import MaybeSentinel -from libcst._removal_sentinel import RemovalSentinel -from libcst._typed_visitor_base import mark_no_op - - -if TYPE_CHECKING: - from libcst._nodes.expression import ( # noqa: F401 - Annotation, - Arg, - Asynchronous, - Attribute, - Await, - BaseDictElement, - BaseElement, - BaseExpression, - BaseFormattedStringContent, - BaseSlice, - BinaryOperation, - BooleanOperation, - Call, - Comparison, - ComparisonTarget, - CompFor, - CompIf, - ConcatenatedString, - Dict, - DictComp, - DictElement, - Element, - Ellipsis, - Float, - FormattedString, - FormattedStringExpression, - FormattedStringText, - From, - GeneratorExp, - IfExp, - Imaginary, - Index, - Integer, - Lambda, - LeftCurlyBrace, - LeftParen, - LeftSquareBracket, - List, - ListComp, - Name, - NamedExpr, - Param, - Parameters, - ParamSlash, - ParamStar, - RightCurlyBrace, - RightParen, - RightSquareBracket, - Set, - SetComp, - SimpleString, - Slice, - StarredDictElement, - StarredElement, - Subscript, - SubscriptElement, - Tuple, - UnaryOperation, - Yield, - ) - from libcst._nodes.module import Module # noqa: F401 - from libcst._nodes.op import ( # noqa: F401 - Add, - AddAssign, - And, - AssignEqual, - BaseAugOp, - BaseBinaryOp, - BaseBooleanOp, - BaseCompOp, - BaseUnaryOp, - BitAnd, - BitAndAssign, - BitInvert, - BitOr, - BitOrAssign, - BitXor, - BitXorAssign, - Colon, - Comma, - Divide, - DivideAssign, - Dot, - Equal, - FloorDivide, - FloorDivideAssign, - GreaterThan, - GreaterThanEqual, - ImportStar, - In, - Is, - IsNot, - LeftShift, - LeftShiftAssign, - LessThan, - LessThanEqual, - MatrixMultiply, - MatrixMultiplyAssign, - Minus, - Modulo, - ModuloAssign, - Multiply, - MultiplyAssign, - Not, - NotEqual, - NotIn, - Or, - Plus, - Power, - PowerAssign, - RightShift, - RightShiftAssign, - Semicolon, - Subtract, - SubtractAssign, - ) - from libcst._nodes.statement import ( # noqa: F401 - AnnAssign, - AsName, - Assert, - Assign, - AssignTarget, - AugAssign, - BaseSmallStatement, - BaseStatement, - BaseSuite, - Break, - ClassDef, - Continue, - Decorator, - Del, - Else, - ExceptHandler, - ExceptStarHandler, - Expr, - Finally, - For, - FunctionDef, - Global, - If, - Import, - ImportAlias, - ImportFrom, - IndentedBlock, - Match, - MatchAs, - MatchCase, - MatchClass, - MatchKeywordElement, - MatchList, - MatchMapping, - MatchMappingElement, - MatchOr, - MatchOrElement, - MatchPattern, - MatchSequence, - MatchSequenceElement, - MatchSingleton, - MatchStar, - MatchTuple, - MatchValue, - NameItem, - Nonlocal, - Pass, - Raise, - Return, - SimpleStatementLine, - SimpleStatementSuite, - Try, - TryStar, - While, - With, - WithItem, - ) - from libcst._nodes.whitespace import ( # noqa: F401 - BaseParenthesizableWhitespace, - Comment, - EmptyLine, - Newline, - ParenthesizedWhitespace, - SimpleWhitespace, - TrailingWhitespace, - ) - - -class CSTTypedBaseFunctions: - @mark_no_op - def visit_Add(self, node: "Add") -> Optional[bool]: - pass - - @mark_no_op - def visit_Add_whitespace_before(self, node: "Add") -> None: - pass - - @mark_no_op - def leave_Add_whitespace_before(self, node: "Add") -> None: - pass - - @mark_no_op - def visit_Add_whitespace_after(self, node: "Add") -> None: - pass - - @mark_no_op - def leave_Add_whitespace_after(self, node: "Add") -> None: - pass - - @mark_no_op - def visit_AddAssign(self, node: "AddAssign") -> 
Optional[bool]: - pass - - @mark_no_op - def visit_AddAssign_whitespace_before(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def leave_AddAssign_whitespace_before(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def visit_AddAssign_whitespace_after(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def leave_AddAssign_whitespace_after(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def visit_And(self, node: "And") -> Optional[bool]: - pass - - @mark_no_op - def visit_And_whitespace_before(self, node: "And") -> None: - pass - - @mark_no_op - def leave_And_whitespace_before(self, node: "And") -> None: - pass - - @mark_no_op - def visit_And_whitespace_after(self, node: "And") -> None: - pass - - @mark_no_op - def leave_And_whitespace_after(self, node: "And") -> None: - pass - - @mark_no_op - def visit_AnnAssign(self, node: "AnnAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_AnnAssign_target(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_target(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_annotation(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_annotation(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_value(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_value(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_equal(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_equal(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_semicolon(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_semicolon(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_Annotation(self, node: "Annotation") -> Optional[bool]: - pass - - @mark_no_op - def visit_Annotation_annotation(self, node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Annotation_annotation(self, node: "Annotation") -> None: - pass - - @mark_no_op - def visit_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def visit_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def visit_Arg(self, node: "Arg") -> Optional[bool]: - pass - - @mark_no_op - def visit_Arg_value(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_value(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_keyword(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_keyword(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_equal(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_equal(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_comma(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_comma(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_whitespace_after_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_whitespace_after_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_whitespace_after_arg(self, node: "Arg") -> 
None: - pass - - @mark_no_op - def leave_Arg_whitespace_after_arg(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_AsName(self, node: "AsName") -> Optional[bool]: - pass - - @mark_no_op - def visit_AsName_name(self, node: "AsName") -> None: - pass - - @mark_no_op - def leave_AsName_name(self, node: "AsName") -> None: - pass - - @mark_no_op - def visit_AsName_whitespace_before_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def leave_AsName_whitespace_before_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def visit_AsName_whitespace_after_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def leave_AsName_whitespace_after_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def visit_Assert(self, node: "Assert") -> Optional[bool]: - pass - - @mark_no_op - def visit_Assert_test(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_test(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_msg(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_msg(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_comma(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_comma(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_whitespace_after_assert(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_whitespace_after_assert(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_semicolon(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_semicolon(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assign(self, node: "Assign") -> Optional[bool]: - pass - - @mark_no_op - def visit_Assign_targets(self, node: "Assign") -> None: - pass - - @mark_no_op - def leave_Assign_targets(self, node: "Assign") -> None: - pass - - @mark_no_op - def visit_Assign_value(self, node: "Assign") -> None: - pass - - @mark_no_op - def leave_Assign_value(self, node: "Assign") -> None: - pass - - @mark_no_op - def visit_Assign_semicolon(self, node: "Assign") -> None: - pass - - @mark_no_op - def leave_Assign_semicolon(self, node: "Assign") -> None: - pass - - @mark_no_op - def visit_AssignEqual(self, node: "AssignEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def leave_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def visit_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def leave_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def visit_AssignTarget(self, node: "AssignTarget") -> Optional[bool]: - pass - - @mark_no_op - def visit_AssignTarget_target(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_AssignTarget_target(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def visit_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def visit_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def visit_Asynchronous(self, node: "Asynchronous") -> Optional[bool]: - pass - - @mark_no_op - def 
visit_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: - pass - - @mark_no_op - def leave_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: - pass - - @mark_no_op - def visit_Attribute(self, node: "Attribute") -> Optional[bool]: - pass - - @mark_no_op - def visit_Attribute_value(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_value(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_attr(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_attr(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_dot(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_dot(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_lpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_lpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_rpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_rpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_AugAssign(self, node: "AugAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_AugAssign_target(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_target(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_AugAssign_operator(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_operator(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_AugAssign_value(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_value(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_AugAssign_semicolon(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_semicolon(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_Await(self, node: "Await") -> Optional[bool]: - pass - - @mark_no_op - def visit_Await_expression(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_expression(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_Await_lpar(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_lpar(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_Await_rpar(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_rpar(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_Await_whitespace_after_await(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_whitespace_after_await(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_BinaryOperation(self, node: "BinaryOperation") -> Optional[bool]: - pass - - @mark_no_op - def visit_BinaryOperation_left(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_left(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_operator(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_operator(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_right(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_right(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def 
visit_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BitAnd(self, node: "BitAnd") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitAnd_whitespace_before(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def leave_BitAnd_whitespace_before(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def visit_BitAnd_whitespace_after(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def leave_BitAnd_whitespace_after(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def visit_BitAndAssign(self, node: "BitAndAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def leave_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def visit_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def leave_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def visit_BitInvert(self, node: "BitInvert") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitInvert_whitespace_after(self, node: "BitInvert") -> None: - pass - - @mark_no_op - def leave_BitInvert_whitespace_after(self, node: "BitInvert") -> None: - pass - - @mark_no_op - def visit_BitOr(self, node: "BitOr") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitOr_whitespace_before(self, node: "BitOr") -> None: - pass - - @mark_no_op - def leave_BitOr_whitespace_before(self, node: "BitOr") -> None: - pass - - @mark_no_op - def visit_BitOr_whitespace_after(self, node: "BitOr") -> None: - pass - - @mark_no_op - def leave_BitOr_whitespace_after(self, node: "BitOr") -> None: - pass - - @mark_no_op - def visit_BitOrAssign(self, node: "BitOrAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def leave_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def visit_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def leave_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def visit_BitXor(self, node: "BitXor") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitXor_whitespace_before(self, node: "BitXor") -> None: - pass - - @mark_no_op - def leave_BitXor_whitespace_before(self, node: "BitXor") -> None: - pass - - @mark_no_op - def visit_BitXor_whitespace_after(self, node: "BitXor") -> None: - pass - - @mark_no_op - def leave_BitXor_whitespace_after(self, node: "BitXor") -> None: - pass - - @mark_no_op - def visit_BitXorAssign(self, node: "BitXorAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def leave_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def visit_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def leave_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def visit_BooleanOperation(self, node: "BooleanOperation") -> Optional[bool]: - pass - - @mark_no_op - def visit_BooleanOperation_left(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_left(self, node: 
"BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_operator(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_operator(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_right(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_right(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_Break(self, node: "Break") -> Optional[bool]: - pass - - @mark_no_op - def visit_Break_semicolon(self, node: "Break") -> None: - pass - - @mark_no_op - def leave_Break_semicolon(self, node: "Break") -> None: - pass - - @mark_no_op - def visit_Call(self, node: "Call") -> Optional[bool]: - pass - - @mark_no_op - def visit_Call_func(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_func(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_args(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_args(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_lpar(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_lpar(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_rpar(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_rpar(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_whitespace_after_func(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_whitespace_after_func(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_whitespace_before_args(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_whitespace_before_args(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_ClassDef(self, node: "ClassDef") -> Optional[bool]: - pass - - @mark_no_op - def visit_ClassDef_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_body(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_body(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_bases(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_bases(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_keywords(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_keywords(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_lpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_lpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_rpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_rpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_leading_lines(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_leading_lines(self, node: "ClassDef") -> None: - pass - - 
@mark_no_op - def visit_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_Colon(self, node: "Colon") -> Optional[bool]: - pass - - @mark_no_op - def visit_Colon_whitespace_before(self, node: "Colon") -> None: - pass - - @mark_no_op - def leave_Colon_whitespace_before(self, node: "Colon") -> None: - pass - - @mark_no_op - def visit_Colon_whitespace_after(self, node: "Colon") -> None: - pass - - @mark_no_op - def leave_Colon_whitespace_after(self, node: "Colon") -> None: - pass - - @mark_no_op - def visit_Comma(self, node: "Comma") -> Optional[bool]: - pass - - @mark_no_op - def visit_Comma_whitespace_before(self, node: "Comma") -> None: - pass - - @mark_no_op - def leave_Comma_whitespace_before(self, node: "Comma") -> None: - pass - - @mark_no_op - def visit_Comma_whitespace_after(self, node: "Comma") -> None: - pass - - @mark_no_op - def leave_Comma_whitespace_after(self, node: "Comma") -> None: - pass - - @mark_no_op - def visit_Comment(self, node: "Comment") -> Optional[bool]: - pass - - @mark_no_op - def visit_Comment_value(self, node: "Comment") -> None: - pass - - @mark_no_op - def leave_Comment_value(self, node: "Comment") -> None: - pass - - @mark_no_op - def visit_CompFor(self, node: "CompFor") -> Optional[bool]: - pass - - @mark_no_op - def visit_CompFor_target(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_target(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_iter(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_iter(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_ifs(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_ifs(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_inner_for_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_inner_for_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_asynchronous(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_asynchronous(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_before(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_before(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_after_for(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_after_for(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_before_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_before_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_after_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def 
leave_CompFor_whitespace_after_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompIf(self, node: "CompIf") -> Optional[bool]: - pass - - @mark_no_op - def visit_CompIf_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def leave_CompIf_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def visit_CompIf_whitespace_before(self, node: "CompIf") -> None: - pass - - @mark_no_op - def leave_CompIf_whitespace_before(self, node: "CompIf") -> None: - pass - - @mark_no_op - def visit_CompIf_whitespace_before_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def leave_CompIf_whitespace_before_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def visit_Comparison(self, node: "Comparison") -> Optional[bool]: - pass - - @mark_no_op - def visit_Comparison_left(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_left(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_Comparison_comparisons(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_comparisons(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_Comparison_lpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_lpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_Comparison_rpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_rpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_ComparisonTarget(self, node: "ComparisonTarget") -> Optional[bool]: - pass - - @mark_no_op - def visit_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def leave_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def visit_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def leave_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString(self, node: "ConcatenatedString") -> Optional[bool]: - pass - - @mark_no_op - def visit_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_whitespace_between( - self, node: "ConcatenatedString" - ) -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_whitespace_between( - self, node: "ConcatenatedString" - ) -> None: - pass - - @mark_no_op - def visit_Continue(self, node: "Continue") -> Optional[bool]: - pass - - @mark_no_op - def visit_Continue_semicolon(self, node: "Continue") -> None: - pass - - @mark_no_op - def leave_Continue_semicolon(self, node: "Continue") -> None: - pass - - @mark_no_op - def visit_Decorator(self, node: "Decorator") -> Optional[bool]: - pass - - @mark_no_op - def 
visit_Decorator_decorator(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_decorator(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Decorator_leading_lines(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_leading_lines(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Decorator_whitespace_after_at(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_whitespace_after_at(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Decorator_trailing_whitespace(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_trailing_whitespace(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Del(self, node: "Del") -> Optional[bool]: - pass - - @mark_no_op - def visit_Del_target(self, node: "Del") -> None: - pass - - @mark_no_op - def leave_Del_target(self, node: "Del") -> None: - pass - - @mark_no_op - def visit_Del_whitespace_after_del(self, node: "Del") -> None: - pass - - @mark_no_op - def leave_Del_whitespace_after_del(self, node: "Del") -> None: - pass - - @mark_no_op - def visit_Del_semicolon(self, node: "Del") -> None: - pass - - @mark_no_op - def leave_Del_semicolon(self, node: "Del") -> None: - pass - - @mark_no_op - def visit_Dict(self, node: "Dict") -> Optional[bool]: - pass - - @mark_no_op - def visit_Dict_elements(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_elements(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_lbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_lbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_rbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_rbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_lpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_lpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_rpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_rpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_DictComp(self, node: "DictComp") -> Optional[bool]: - pass - - @mark_no_op - def visit_DictComp_key(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_key(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_value(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_value(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_for_in(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_for_in(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_lbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_lbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_rbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_rbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_lpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_lpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_rpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_rpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_whitespace_before_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_whitespace_before_colon(self, node: 
"DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictElement(self, node: "DictElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_DictElement_key(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_key(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_value(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_value(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_comma(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_comma(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_Divide(self, node: "Divide") -> Optional[bool]: - pass - - @mark_no_op - def visit_Divide_whitespace_before(self, node: "Divide") -> None: - pass - - @mark_no_op - def leave_Divide_whitespace_before(self, node: "Divide") -> None: - pass - - @mark_no_op - def visit_Divide_whitespace_after(self, node: "Divide") -> None: - pass - - @mark_no_op - def leave_Divide_whitespace_after(self, node: "Divide") -> None: - pass - - @mark_no_op - def visit_DivideAssign(self, node: "DivideAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def leave_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def visit_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def leave_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def visit_Dot(self, node: "Dot") -> Optional[bool]: - pass - - @mark_no_op - def visit_Dot_whitespace_before(self, node: "Dot") -> None: - pass - - @mark_no_op - def leave_Dot_whitespace_before(self, node: "Dot") -> None: - pass - - @mark_no_op - def visit_Dot_whitespace_after(self, node: "Dot") -> None: - pass - - @mark_no_op - def leave_Dot_whitespace_after(self, node: "Dot") -> None: - pass - - @mark_no_op - def visit_Element(self, node: "Element") -> Optional[bool]: - pass - - @mark_no_op - def visit_Element_value(self, node: "Element") -> None: - pass - - @mark_no_op - def leave_Element_value(self, node: "Element") -> None: - pass - - @mark_no_op - def visit_Element_comma(self, node: "Element") -> None: - pass - - @mark_no_op - def leave_Element_comma(self, node: "Element") -> None: - pass - - @mark_no_op - def visit_Ellipsis(self, node: "Ellipsis") -> Optional[bool]: - pass - - @mark_no_op - def visit_Ellipsis_lpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def leave_Ellipsis_lpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def visit_Ellipsis_rpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def leave_Ellipsis_rpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def visit_Else(self, node: "Else") -> Optional[bool]: - pass - - 
@mark_no_op - def visit_Else_body(self, node: "Else") -> None: - pass - - @mark_no_op - def leave_Else_body(self, node: "Else") -> None: - pass - - @mark_no_op - def visit_Else_leading_lines(self, node: "Else") -> None: - pass - - @mark_no_op - def leave_Else_leading_lines(self, node: "Else") -> None: - pass - - @mark_no_op - def visit_Else_whitespace_before_colon(self, node: "Else") -> None: - pass - - @mark_no_op - def leave_Else_whitespace_before_colon(self, node: "Else") -> None: - pass - - @mark_no_op - def visit_EmptyLine(self, node: "EmptyLine") -> Optional[bool]: - pass - - @mark_no_op - def visit_EmptyLine_indent(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_indent(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_EmptyLine_whitespace(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_whitespace(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_EmptyLine_comment(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_comment(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_EmptyLine_newline(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_newline(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_Equal(self, node: "Equal") -> Optional[bool]: - pass - - @mark_no_op - def visit_Equal_whitespace_before(self, node: "Equal") -> None: - pass - - @mark_no_op - def leave_Equal_whitespace_before(self, node: "Equal") -> None: - pass - - @mark_no_op - def visit_Equal_whitespace_after(self, node: "Equal") -> None: - pass - - @mark_no_op - def leave_Equal_whitespace_after(self, node: "Equal") -> None: - pass - - @mark_no_op - def visit_ExceptHandler(self, node: "ExceptHandler") -> Optional[bool]: - pass - - @mark_no_op - def visit_ExceptHandler_body(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_body(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_type(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_type(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_name(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_name(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_whitespace_after_except( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptHandler_whitespace_after_except( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptHandler_whitespace_before_colon( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptHandler_whitespace_before_colon( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler(self, node: "ExceptStarHandler") -> Optional[bool]: - pass - - @mark_no_op - def visit_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: - pass - - 
@mark_no_op - def visit_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_whitespace_after_except( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_whitespace_after_except( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_whitespace_after_star( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_whitespace_after_star( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_whitespace_before_colon( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_whitespace_before_colon( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def visit_Expr(self, node: "Expr") -> Optional[bool]: - pass - - @mark_no_op - def visit_Expr_value(self, node: "Expr") -> None: - pass - - @mark_no_op - def leave_Expr_value(self, node: "Expr") -> None: - pass - - @mark_no_op - def visit_Expr_semicolon(self, node: "Expr") -> None: - pass - - @mark_no_op - def leave_Expr_semicolon(self, node: "Expr") -> None: - pass - - @mark_no_op - def visit_Finally(self, node: "Finally") -> Optional[bool]: - pass - - @mark_no_op - def visit_Finally_body(self, node: "Finally") -> None: - pass - - @mark_no_op - def leave_Finally_body(self, node: "Finally") -> None: - pass - - @mark_no_op - def visit_Finally_leading_lines(self, node: "Finally") -> None: - pass - - @mark_no_op - def leave_Finally_leading_lines(self, node: "Finally") -> None: - pass - - @mark_no_op - def visit_Finally_whitespace_before_colon(self, node: "Finally") -> None: - pass - - @mark_no_op - def leave_Finally_whitespace_before_colon(self, node: "Finally") -> None: - pass - - @mark_no_op - def visit_Float(self, node: "Float") -> Optional[bool]: - pass - - @mark_no_op - def visit_Float_value(self, node: "Float") -> None: - pass - - @mark_no_op - def leave_Float_value(self, node: "Float") -> None: - pass - - @mark_no_op - def visit_Float_lpar(self, node: "Float") -> None: - pass - - @mark_no_op - def leave_Float_lpar(self, node: "Float") -> None: - pass - - @mark_no_op - def visit_Float_rpar(self, node: "Float") -> None: - pass - - @mark_no_op - def leave_Float_rpar(self, node: "Float") -> None: - pass - - @mark_no_op - def visit_FloorDivide(self, node: "FloorDivide") -> Optional[bool]: - pass - - @mark_no_op - def visit_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def leave_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def visit_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def leave_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def visit_FloorDivideAssign(self, node: "FloorDivideAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_FloorDivideAssign_whitespace_before( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def leave_FloorDivideAssign_whitespace_before( - self, node: "FloorDivideAssign" - ) -> None: - pass - - 
@mark_no_op - def visit_FloorDivideAssign_whitespace_after( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def leave_FloorDivideAssign_whitespace_after( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def visit_For(self, node: "For") -> Optional[bool]: - pass - - @mark_no_op - def visit_For_target(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_target(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_iter(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_iter(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_body(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_body(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_orelse(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_orelse(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_asynchronous(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_asynchronous(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_leading_lines(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_leading_lines(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_after_for(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_after_for(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_before_in(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_before_in(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_after_in(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_after_in(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_before_colon(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_before_colon(self, node: "For") -> None: - pass - - @mark_no_op - def visit_FormattedString(self, node: "FormattedString") -> Optional[bool]: - pass - - @mark_no_op - def visit_FormattedString_parts(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_parts(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_start(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_start(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_end(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_end(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_lpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_lpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_rpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_rpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression( - self, node: "FormattedStringExpression" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_FormattedStringExpression_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_conversion( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def 
leave_FormattedStringExpression_conversion( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_format_spec( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_format_spec( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_whitespace_before_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_whitespace_before_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_whitespace_after_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_whitespace_after_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_equal( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_equal( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringText(self, node: "FormattedStringText") -> Optional[bool]: - pass - - @mark_no_op - def visit_FormattedStringText_value(self, node: "FormattedStringText") -> None: - pass - - @mark_no_op - def leave_FormattedStringText_value(self, node: "FormattedStringText") -> None: - pass - - @mark_no_op - def visit_From(self, node: "From") -> Optional[bool]: - pass - - @mark_no_op - def visit_From_item(self, node: "From") -> None: - pass - - @mark_no_op - def leave_From_item(self, node: "From") -> None: - pass - - @mark_no_op - def visit_From_whitespace_before_from(self, node: "From") -> None: - pass - - @mark_no_op - def leave_From_whitespace_before_from(self, node: "From") -> None: - pass - - @mark_no_op - def visit_From_whitespace_after_from(self, node: "From") -> None: - pass - - @mark_no_op - def leave_From_whitespace_after_from(self, node: "From") -> None: - pass - - @mark_no_op - def visit_FunctionDef(self, node: "FunctionDef") -> Optional[bool]: - pass - - @mark_no_op - def visit_FunctionDef_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_body(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_body(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_returns(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_returns(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_lines_after_decorators(self, 
node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_lines_after_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_GeneratorExp(self, node: "GeneratorExp") -> Optional[bool]: - pass - - @mark_no_op - def visit_GeneratorExp_elt(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_elt(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_Global(self, node: "Global") -> Optional[bool]: - pass - - @mark_no_op - def visit_Global_names(self, node: "Global") -> None: - pass - - @mark_no_op - def leave_Global_names(self, node: "Global") -> None: - pass - - @mark_no_op - def visit_Global_whitespace_after_global(self, node: "Global") -> None: - pass - - @mark_no_op - def leave_Global_whitespace_after_global(self, node: "Global") -> None: - pass - - @mark_no_op - def visit_Global_semicolon(self, node: "Global") -> None: - pass - - @mark_no_op - def leave_Global_semicolon(self, node: "Global") -> None: - pass - - @mark_no_op - def visit_GreaterThan(self, node: "GreaterThan") -> Optional[bool]: - pass - - @mark_no_op - def visit_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def leave_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def visit_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def leave_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def visit_GreaterThanEqual(self, node: "GreaterThanEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_GreaterThanEqual_whitespace_before( - self, node: "GreaterThanEqual" - ) -> None: - pass - - @mark_no_op - def leave_GreaterThanEqual_whitespace_before( - self, node: "GreaterThanEqual" - ) -> None: - pass - - @mark_no_op - def visit_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: - pass - - @mark_no_op - def leave_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: - pass - - @mark_no_op - def visit_If(self, node: "If") -> Optional[bool]: - pass - - @mark_no_op - 
def visit_If_test(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_test(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_body(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_body(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_orelse(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_orelse(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_leading_lines(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_leading_lines(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_whitespace_before_test(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_whitespace_before_test(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_whitespace_after_test(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_whitespace_after_test(self, node: "If") -> None: - pass - - @mark_no_op - def visit_IfExp(self, node: "IfExp") -> Optional[bool]: - pass - - @mark_no_op - def visit_IfExp_test(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_test(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_body(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_body(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_orelse(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_orelse(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_lpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_lpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_rpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_rpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_before_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_before_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_after_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_after_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_before_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_before_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_after_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_after_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_Imaginary(self, node: "Imaginary") -> Optional[bool]: - pass - - @mark_no_op - def visit_Imaginary_value(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Imaginary_value(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def visit_Imaginary_lpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Imaginary_lpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def visit_Imaginary_rpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Imaginary_rpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def visit_Import(self, node: "Import") -> Optional[bool]: - pass - - @mark_no_op - def visit_Import_names(self, node: "Import") -> None: - pass - - @mark_no_op - def leave_Import_names(self, node: "Import") -> None: - pass - - @mark_no_op - def visit_Import_semicolon(self, node: "Import") -> None: - pass - - @mark_no_op - def leave_Import_semicolon(self, node: "Import") -> None: - pass - - @mark_no_op - def 
visit_Import_whitespace_after_import(self, node: "Import") -> None: - pass - - @mark_no_op - def leave_Import_whitespace_after_import(self, node: "Import") -> None: - pass - - @mark_no_op - def visit_ImportAlias(self, node: "ImportAlias") -> Optional[bool]: - pass - - @mark_no_op - def visit_ImportAlias_name(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportAlias_name(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def visit_ImportAlias_asname(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportAlias_asname(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def visit_ImportAlias_comma(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportAlias_comma(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def visit_ImportFrom(self, node: "ImportFrom") -> Optional[bool]: - pass - - @mark_no_op - def visit_ImportFrom_module(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_module(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_names(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_names(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_relative(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_relative(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_lpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_lpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_rpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_rpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_semicolon(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_semicolon(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportStar(self, node: "ImportStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_In(self, node: "In") -> Optional[bool]: - pass - - @mark_no_op - def visit_In_whitespace_before(self, node: "In") -> None: - pass - - @mark_no_op - def leave_In_whitespace_before(self, node: "In") -> None: - pass - - @mark_no_op - def visit_In_whitespace_after(self, node: "In") -> None: - pass - - @mark_no_op - def leave_In_whitespace_after(self, node: "In") -> None: - pass - - @mark_no_op - def visit_IndentedBlock(self, node: "IndentedBlock") -> Optional[bool]: - pass - - @mark_no_op - def visit_IndentedBlock_body(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_body(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_IndentedBlock_header(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_header(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op 
- def visit_IndentedBlock_indent(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_indent(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_IndentedBlock_footer(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_footer(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_Index(self, node: "Index") -> Optional[bool]: - pass - - @mark_no_op - def visit_Index_value(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_value(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Index_star(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_star(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Index_whitespace_after_star(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_whitespace_after_star(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Integer(self, node: "Integer") -> Optional[bool]: - pass - - @mark_no_op - def visit_Integer_value(self, node: "Integer") -> None: - pass - - @mark_no_op - def leave_Integer_value(self, node: "Integer") -> None: - pass - - @mark_no_op - def visit_Integer_lpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def leave_Integer_lpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def visit_Integer_rpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def leave_Integer_rpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def visit_Is(self, node: "Is") -> Optional[bool]: - pass - - @mark_no_op - def visit_Is_whitespace_before(self, node: "Is") -> None: - pass - - @mark_no_op - def leave_Is_whitespace_before(self, node: "Is") -> None: - pass - - @mark_no_op - def visit_Is_whitespace_after(self, node: "Is") -> None: - pass - - @mark_no_op - def leave_Is_whitespace_after(self, node: "Is") -> None: - pass - - @mark_no_op - def visit_IsNot(self, node: "IsNot") -> Optional[bool]: - pass - - @mark_no_op - def visit_IsNot_whitespace_before(self, node: "IsNot") -> None: - pass - - @mark_no_op - def leave_IsNot_whitespace_before(self, node: "IsNot") -> None: - pass - - @mark_no_op - def visit_IsNot_whitespace_between(self, node: "IsNot") -> None: - pass - - @mark_no_op - def leave_IsNot_whitespace_between(self, node: "IsNot") -> None: - pass - - @mark_no_op - def visit_IsNot_whitespace_after(self, node: "IsNot") -> None: - pass - - @mark_no_op - def leave_IsNot_whitespace_after(self, node: "IsNot") -> None: - pass - - @mark_no_op - def visit_Lambda(self, node: "Lambda") -> Optional[bool]: - pass - - @mark_no_op - def visit_Lambda_params(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_params(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_body(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_body(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_colon(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_colon(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_lpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_lpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_rpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_rpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_whitespace_after_lambda(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_whitespace_after_lambda(self, 
node: "Lambda") -> None: - pass - - @mark_no_op - def visit_LeftCurlyBrace(self, node: "LeftCurlyBrace") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: - pass - - @mark_no_op - def leave_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: - pass - - @mark_no_op - def visit_LeftParen(self, node: "LeftParen") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftParen_whitespace_after(self, node: "LeftParen") -> None: - pass - - @mark_no_op - def leave_LeftParen_whitespace_after(self, node: "LeftParen") -> None: - pass - - @mark_no_op - def visit_LeftShift(self, node: "LeftShift") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftShift_whitespace_before(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def leave_LeftShift_whitespace_before(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def visit_LeftShift_whitespace_after(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def leave_LeftShift_whitespace_after(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def visit_LeftShiftAssign(self, node: "LeftShiftAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def leave_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def visit_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def leave_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def visit_LeftSquareBracket(self, node: "LeftSquareBracket") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftSquareBracket_whitespace_after( - self, node: "LeftSquareBracket" - ) -> None: - pass - - @mark_no_op - def leave_LeftSquareBracket_whitespace_after( - self, node: "LeftSquareBracket" - ) -> None: - pass - - @mark_no_op - def visit_LessThan(self, node: "LessThan") -> Optional[bool]: - pass - - @mark_no_op - def visit_LessThan_whitespace_before(self, node: "LessThan") -> None: - pass - - @mark_no_op - def leave_LessThan_whitespace_before(self, node: "LessThan") -> None: - pass - - @mark_no_op - def visit_LessThan_whitespace_after(self, node: "LessThan") -> None: - pass - - @mark_no_op - def leave_LessThan_whitespace_after(self, node: "LessThan") -> None: - pass - - @mark_no_op - def visit_LessThanEqual(self, node: "LessThanEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def leave_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def visit_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def leave_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def visit_List(self, node: "List") -> Optional[bool]: - pass - - @mark_no_op - def visit_List_elements(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_elements(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_lbracket(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_lbracket(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_rbracket(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_rbracket(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_lpar(self, node: 
"List") -> None: - pass - - @mark_no_op - def leave_List_lpar(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_rpar(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_rpar(self, node: "List") -> None: - pass - - @mark_no_op - def visit_ListComp(self, node: "ListComp") -> Optional[bool]: - pass - - @mark_no_op - def visit_ListComp_elt(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_elt(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_for_in(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_for_in(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_lbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_lbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_rbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_rbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_lpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_lpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_rpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_rpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_Match(self, node: "Match") -> Optional[bool]: - pass - - @mark_no_op - def visit_Match_subject(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_subject(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_cases(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_cases(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_leading_lines(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_leading_lines(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_whitespace_after_match(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_whitespace_after_match(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_whitespace_before_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_whitespace_before_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_whitespace_after_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_whitespace_after_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_indent(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_indent(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_footer(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_footer(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_MatchAs(self, node: "MatchAs") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchAs_pattern(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_pattern(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_name(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_name(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_whitespace_after_as(self, node: "MatchAs") 
-> None: - pass - - @mark_no_op - def visit_MatchAs_lpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_lpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_rpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_rpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchCase(self, node: "MatchCase") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchCase_pattern(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_pattern(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_body(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_body(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_guard(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_guard(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_leading_lines(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_leading_lines(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchClass(self, node: "MatchClass") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchClass_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_lpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_lpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_rpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_rpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def 
visit_MatchKeywordElement(self, node: "MatchKeywordElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_whitespace_before_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_whitespace_before_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_whitespace_after_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_whitespace_after_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchList(self, node: "MatchList") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchList_patterns(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_patterns(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_lbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_lbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_rbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_rbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_lpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_lpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_rpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_rpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchMapping(self, node: "MatchMapping") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchMapping_elements(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_elements(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_lbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_lbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_rbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_rbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_lpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def 
leave_MatchMapping_lpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_rpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_rpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement(self, node: "MatchMappingElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_whitespace_before_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_whitespace_before_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_whitespace_after_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_whitespace_after_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchOr(self, node: "MatchOr") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchOr_patterns(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOr_patterns(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def visit_MatchOr_lpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOr_lpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def visit_MatchOr_rpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOr_rpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def visit_MatchOrElement(self, node: "MatchOrElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def leave_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def visit_MatchOrElement_separator(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def leave_MatchOrElement_separator(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def visit_MatchPattern(self, node: "MatchPattern") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSequence(self, node: "MatchSequence") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSequenceElement( - self, node: "MatchSequenceElement" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def leave_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def visit_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def leave_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def visit_MatchSingleton(self, node: "MatchSingleton") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSingleton_value(self, node: "MatchSingleton") -> None: - pass - - @mark_no_op - def leave_MatchSingleton_value(self, 
node: "MatchSingleton") -> None: - pass - - @mark_no_op - def visit_MatchStar(self, node: "MatchStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchStar_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchStar_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def visit_MatchStar_comma(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchStar_comma(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def visit_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def visit_MatchTuple(self, node: "MatchTuple") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchTuple_patterns(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchTuple_patterns(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def visit_MatchTuple_lpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchTuple_lpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def visit_MatchTuple_rpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchTuple_rpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def visit_MatchValue(self, node: "MatchValue") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchValue_value(self, node: "MatchValue") -> None: - pass - - @mark_no_op - def leave_MatchValue_value(self, node: "MatchValue") -> None: - pass - - @mark_no_op - def visit_MatrixMultiply(self, node: "MatrixMultiply") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def leave_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def visit_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def leave_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def visit_MatrixMultiplyAssign( - self, node: "MatrixMultiplyAssign" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_MatrixMultiplyAssign_whitespace_before( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def leave_MatrixMultiplyAssign_whitespace_before( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def visit_MatrixMultiplyAssign_whitespace_after( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def leave_MatrixMultiplyAssign_whitespace_after( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def visit_Minus(self, node: "Minus") -> Optional[bool]: - pass - - @mark_no_op - def visit_Minus_whitespace_after(self, node: "Minus") -> None: - pass - - @mark_no_op - def leave_Minus_whitespace_after(self, node: "Minus") -> None: - pass - - @mark_no_op - def visit_Module(self, node: "Module") -> Optional[bool]: - pass - - @mark_no_op - def visit_Module_body(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_body(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_header(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_header(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_footer(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_footer(self, node: "Module") -> None: - pass - - @mark_no_op - def 
visit_Module_encoding(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_encoding(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_default_indent(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_default_indent(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_default_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_default_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_has_trailing_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_has_trailing_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Modulo(self, node: "Modulo") -> Optional[bool]: - pass - - @mark_no_op - def visit_Modulo_whitespace_before(self, node: "Modulo") -> None: - pass - - @mark_no_op - def leave_Modulo_whitespace_before(self, node: "Modulo") -> None: - pass - - @mark_no_op - def visit_Modulo_whitespace_after(self, node: "Modulo") -> None: - pass - - @mark_no_op - def leave_Modulo_whitespace_after(self, node: "Modulo") -> None: - pass - - @mark_no_op - def visit_ModuloAssign(self, node: "ModuloAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def leave_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def visit_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def leave_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def visit_Multiply(self, node: "Multiply") -> Optional[bool]: - pass - - @mark_no_op - def visit_Multiply_whitespace_before(self, node: "Multiply") -> None: - pass - - @mark_no_op - def leave_Multiply_whitespace_before(self, node: "Multiply") -> None: - pass - - @mark_no_op - def visit_Multiply_whitespace_after(self, node: "Multiply") -> None: - pass - - @mark_no_op - def leave_Multiply_whitespace_after(self, node: "Multiply") -> None: - pass - - @mark_no_op - def visit_MultiplyAssign(self, node: "MultiplyAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def leave_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def visit_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def leave_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def visit_Name(self, node: "Name") -> Optional[bool]: - pass - - @mark_no_op - def visit_Name_value(self, node: "Name") -> None: - pass - - @mark_no_op - def leave_Name_value(self, node: "Name") -> None: - pass - - @mark_no_op - def visit_Name_lpar(self, node: "Name") -> None: - pass - - @mark_no_op - def leave_Name_lpar(self, node: "Name") -> None: - pass - - @mark_no_op - def visit_Name_rpar(self, node: "Name") -> None: - pass - - @mark_no_op - def leave_Name_rpar(self, node: "Name") -> None: - pass - - @mark_no_op - def visit_NameItem(self, node: "NameItem") -> Optional[bool]: - pass - - @mark_no_op - def visit_NameItem_name(self, node: "NameItem") -> None: - pass - - @mark_no_op - def leave_NameItem_name(self, node: "NameItem") -> None: - pass - - @mark_no_op - def visit_NameItem_comma(self, node: "NameItem") -> None: - pass - - @mark_no_op - def leave_NameItem_comma(self, node: "NameItem") -> 
None: - pass - - @mark_no_op - def visit_NamedExpr(self, node: "NamedExpr") -> Optional[bool]: - pass - - @mark_no_op - def visit_NamedExpr_target(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_target(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_value(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_value(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_lpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_lpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_rpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_rpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_Newline(self, node: "Newline") -> Optional[bool]: - pass - - @mark_no_op - def visit_Newline_value(self, node: "Newline") -> None: - pass - - @mark_no_op - def leave_Newline_value(self, node: "Newline") -> None: - pass - - @mark_no_op - def visit_Nonlocal(self, node: "Nonlocal") -> Optional[bool]: - pass - - @mark_no_op - def visit_Nonlocal_names(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Nonlocal_names(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def visit_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def visit_Nonlocal_semicolon(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Nonlocal_semicolon(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def visit_Not(self, node: "Not") -> Optional[bool]: - pass - - @mark_no_op - def visit_Not_whitespace_after(self, node: "Not") -> None: - pass - - @mark_no_op - def leave_Not_whitespace_after(self, node: "Not") -> None: - pass - - @mark_no_op - def visit_NotEqual(self, node: "NotEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_NotEqual_value(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotEqual_value(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def visit_NotEqual_whitespace_before(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotEqual_whitespace_before(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def visit_NotEqual_whitespace_after(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotEqual_whitespace_after(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def visit_NotIn(self, node: "NotIn") -> Optional[bool]: - pass - - @mark_no_op - def visit_NotIn_whitespace_before(self, node: "NotIn") -> None: - pass - - @mark_no_op - def leave_NotIn_whitespace_before(self, node: "NotIn") -> None: - pass - - @mark_no_op - def visit_NotIn_whitespace_between(self, node: "NotIn") -> None: - pass - - @mark_no_op - def leave_NotIn_whitespace_between(self, node: "NotIn") -> None: - pass - - @mark_no_op - def visit_NotIn_whitespace_after(self, node: "NotIn") -> None: - pass - - @mark_no_op - def leave_NotIn_whitespace_after(self, node: "NotIn") -> 
None: - pass - - @mark_no_op - def visit_Or(self, node: "Or") -> Optional[bool]: - pass - - @mark_no_op - def visit_Or_whitespace_before(self, node: "Or") -> None: - pass - - @mark_no_op - def leave_Or_whitespace_before(self, node: "Or") -> None: - pass - - @mark_no_op - def visit_Or_whitespace_after(self, node: "Or") -> None: - pass - - @mark_no_op - def leave_Or_whitespace_after(self, node: "Or") -> None: - pass - - @mark_no_op - def visit_Param(self, node: "Param") -> Optional[bool]: - pass - - @mark_no_op - def visit_Param_name(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_name(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_annotation(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_annotation(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_equal(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_equal(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_default(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_default(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_comma(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_comma(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_star(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_star(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_whitespace_after_star(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_whitespace_after_star(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_whitespace_after_param(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_whitespace_after_param(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_ParamSlash(self, node: "ParamSlash") -> Optional[bool]: - pass - - @mark_no_op - def visit_ParamSlash_comma(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamSlash_comma(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def visit_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def visit_ParamStar(self, node: "ParamStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_ParamStar_comma(self, node: "ParamStar") -> None: - pass - - @mark_no_op - def leave_ParamStar_comma(self, node: "ParamStar") -> None: - pass - - @mark_no_op - def visit_Parameters(self, node: "Parameters") -> Optional[bool]: - pass - - @mark_no_op - def visit_Parameters_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_star_arg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_star_arg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_kwonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_kwonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_star_kwarg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_star_kwarg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_posonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_posonly_params(self, node: "Parameters") -> None: - pass - - 
@mark_no_op - def visit_Parameters_posonly_ind(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_posonly_ind(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace( - self, node: "ParenthesizedWhitespace" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_first_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_first_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_empty_lines( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_empty_lines( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_indent( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_indent( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_last_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_last_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_Pass(self, node: "Pass") -> Optional[bool]: - pass - - @mark_no_op - def visit_Pass_semicolon(self, node: "Pass") -> None: - pass - - @mark_no_op - def leave_Pass_semicolon(self, node: "Pass") -> None: - pass - - @mark_no_op - def visit_Plus(self, node: "Plus") -> Optional[bool]: - pass - - @mark_no_op - def visit_Plus_whitespace_after(self, node: "Plus") -> None: - pass - - @mark_no_op - def leave_Plus_whitespace_after(self, node: "Plus") -> None: - pass - - @mark_no_op - def visit_Power(self, node: "Power") -> Optional[bool]: - pass - - @mark_no_op - def visit_Power_whitespace_before(self, node: "Power") -> None: - pass - - @mark_no_op - def leave_Power_whitespace_before(self, node: "Power") -> None: - pass - - @mark_no_op - def visit_Power_whitespace_after(self, node: "Power") -> None: - pass - - @mark_no_op - def leave_Power_whitespace_after(self, node: "Power") -> None: - pass - - @mark_no_op - def visit_PowerAssign(self, node: "PowerAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def leave_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def visit_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def leave_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def visit_Raise(self, node: "Raise") -> Optional[bool]: - pass - - @mark_no_op - def visit_Raise_exc(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_exc(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Raise_cause(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_cause(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Raise_whitespace_after_raise(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_whitespace_after_raise(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Raise_semicolon(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_semicolon(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Return(self, node: "Return") -> Optional[bool]: - pass - - @mark_no_op - def 
visit_Return_value(self, node: "Return") -> None: - pass - - @mark_no_op - def leave_Return_value(self, node: "Return") -> None: - pass - - @mark_no_op - def visit_Return_whitespace_after_return(self, node: "Return") -> None: - pass - - @mark_no_op - def leave_Return_whitespace_after_return(self, node: "Return") -> None: - pass - - @mark_no_op - def visit_Return_semicolon(self, node: "Return") -> None: - pass - - @mark_no_op - def leave_Return_semicolon(self, node: "Return") -> None: - pass - - @mark_no_op - def visit_RightCurlyBrace(self, node: "RightCurlyBrace") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: - pass - - @mark_no_op - def leave_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: - pass - - @mark_no_op - def visit_RightParen(self, node: "RightParen") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightParen_whitespace_before(self, node: "RightParen") -> None: - pass - - @mark_no_op - def leave_RightParen_whitespace_before(self, node: "RightParen") -> None: - pass - - @mark_no_op - def visit_RightShift(self, node: "RightShift") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightShift_whitespace_before(self, node: "RightShift") -> None: - pass - - @mark_no_op - def leave_RightShift_whitespace_before(self, node: "RightShift") -> None: - pass - - @mark_no_op - def visit_RightShift_whitespace_after(self, node: "RightShift") -> None: - pass - - @mark_no_op - def leave_RightShift_whitespace_after(self, node: "RightShift") -> None: - pass - - @mark_no_op - def visit_RightShiftAssign(self, node: "RightShiftAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightShiftAssign_whitespace_before( - self, node: "RightShiftAssign" - ) -> None: - pass - - @mark_no_op - def leave_RightShiftAssign_whitespace_before( - self, node: "RightShiftAssign" - ) -> None: - pass - - @mark_no_op - def visit_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: - pass - - @mark_no_op - def leave_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: - pass - - @mark_no_op - def visit_RightSquareBracket(self, node: "RightSquareBracket") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightSquareBracket_whitespace_before( - self, node: "RightSquareBracket" - ) -> None: - pass - - @mark_no_op - def leave_RightSquareBracket_whitespace_before( - self, node: "RightSquareBracket" - ) -> None: - pass - - @mark_no_op - def visit_Semicolon(self, node: "Semicolon") -> Optional[bool]: - pass - - @mark_no_op - def visit_Semicolon_whitespace_before(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def leave_Semicolon_whitespace_before(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def visit_Semicolon_whitespace_after(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def leave_Semicolon_whitespace_after(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def visit_Set(self, node: "Set") -> Optional[bool]: - pass - - @mark_no_op - def visit_Set_elements(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_elements(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_lbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_lbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_rbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_rbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_lpar(self, node: "Set") -> 
None: - pass - - @mark_no_op - def leave_Set_lpar(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_rpar(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_rpar(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_SetComp(self, node: "SetComp") -> Optional[bool]: - pass - - @mark_no_op - def visit_SetComp_elt(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_elt(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_for_in(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_for_in(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_lbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_lbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_rbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_rbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_lpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_lpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_rpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_rpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SimpleStatementLine(self, node: "SimpleStatementLine") -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: - pass - - @mark_no_op - def visit_SimpleStatementLine_leading_lines( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine_leading_lines( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def visit_SimpleStatementLine_trailing_whitespace( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine_trailing_whitespace( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def visit_SimpleStatementSuite( - self, node: "SimpleStatementSuite" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: - pass - - @mark_no_op - def visit_SimpleStatementSuite_leading_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite_leading_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def visit_SimpleStatementSuite_trailing_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite_trailing_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def visit_SimpleString(self, node: "SimpleString") -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleString_value(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleString_value(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def visit_SimpleString_lpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleString_lpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def visit_SimpleString_rpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleString_rpar(self, node: "SimpleString") -> None: 
- pass - - @mark_no_op - def visit_SimpleWhitespace(self, node: "SimpleWhitespace") -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: - pass - - @mark_no_op - def leave_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: - pass - - @mark_no_op - def visit_Slice(self, node: "Slice") -> Optional[bool]: - pass - - @mark_no_op - def visit_Slice_lower(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_lower(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_upper(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_upper(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_step(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_step(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_first_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_first_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_second_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_second_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_StarredDictElement(self, node: "StarredDictElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_StarredDictElement_value(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def leave_StarredDictElement_value(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def visit_StarredDictElement_comma(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def leave_StarredDictElement_comma(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def visit_StarredDictElement_whitespace_before_value( - self, node: "StarredDictElement" - ) -> None: - pass - - @mark_no_op - def leave_StarredDictElement_whitespace_before_value( - self, node: "StarredDictElement" - ) -> None: - pass - - @mark_no_op - def visit_StarredElement(self, node: "StarredElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_StarredElement_value(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_value(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_comma(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_comma(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_lpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_lpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_rpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_rpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_whitespace_before_value( - self, node: "StarredElement" - ) -> None: - pass - - @mark_no_op - def leave_StarredElement_whitespace_before_value( - self, node: "StarredElement" - ) -> None: - pass - - @mark_no_op - def visit_Subscript(self, node: "Subscript") -> Optional[bool]: - pass - - @mark_no_op - def visit_Subscript_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_slice(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_slice(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_lbracket(self, node: "Subscript") -> 
None: - pass - - @mark_no_op - def leave_Subscript_lbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_rbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_rbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_lpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_lpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_rpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_rpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_whitespace_after_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_whitespace_after_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_SubscriptElement(self, node: "SubscriptElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_SubscriptElement_slice(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def leave_SubscriptElement_slice(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def visit_SubscriptElement_comma(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def leave_SubscriptElement_comma(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def visit_Subtract(self, node: "Subtract") -> Optional[bool]: - pass - - @mark_no_op - def visit_Subtract_whitespace_before(self, node: "Subtract") -> None: - pass - - @mark_no_op - def leave_Subtract_whitespace_before(self, node: "Subtract") -> None: - pass - - @mark_no_op - def visit_Subtract_whitespace_after(self, node: "Subtract") -> None: - pass - - @mark_no_op - def leave_Subtract_whitespace_after(self, node: "Subtract") -> None: - pass - - @mark_no_op - def visit_SubtractAssign(self, node: "SubtractAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_SubtractAssign_whitespace_before(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def leave_SubtractAssign_whitespace_before(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def visit_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def leave_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def visit_TrailingWhitespace(self, node: "TrailingWhitespace") -> Optional[bool]: - pass - - @mark_no_op - def visit_TrailingWhitespace_whitespace(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace_whitespace(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def visit_TrailingWhitespace_comment(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace_comment(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def visit_TrailingWhitespace_newline(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace_newline(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def visit_Try(self, node: "Try") -> Optional[bool]: - pass - - @mark_no_op - def visit_Try_body(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_body(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_handlers(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_handlers(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_orelse(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_orelse(self, node: "Try") 
-> None: - pass - - @mark_no_op - def visit_Try_finalbody(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_finalbody(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_leading_lines(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_leading_lines(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_whitespace_before_colon(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_whitespace_before_colon(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_TryStar(self, node: "TryStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_TryStar_body(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_body(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_handlers(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_handlers(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_orelse(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_orelse(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_finalbody(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_finalbody(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_leading_lines(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_leading_lines(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_Tuple(self, node: "Tuple") -> Optional[bool]: - pass - - @mark_no_op - def visit_Tuple_elements(self, node: "Tuple") -> None: - pass - - @mark_no_op - def leave_Tuple_elements(self, node: "Tuple") -> None: - pass - - @mark_no_op - def visit_Tuple_lpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def leave_Tuple_lpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def visit_Tuple_rpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def leave_Tuple_rpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def visit_UnaryOperation(self, node: "UnaryOperation") -> Optional[bool]: - pass - - @mark_no_op - def visit_UnaryOperation_operator(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_operator(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_UnaryOperation_expression(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_expression(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_UnaryOperation_lpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_lpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_UnaryOperation_rpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_rpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_While(self, node: "While") -> Optional[bool]: - pass - - @mark_no_op - def visit_While_test(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_test(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_body(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_body(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_orelse(self, node: "While") -> None: - pass - - @mark_no_op - def 
leave_While_orelse(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_leading_lines(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_leading_lines(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_whitespace_after_while(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_whitespace_after_while(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_whitespace_before_colon(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_whitespace_before_colon(self, node: "While") -> None: - pass - - @mark_no_op - def visit_With(self, node: "With") -> Optional[bool]: - pass - - @mark_no_op - def visit_With_items(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_items(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_body(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_body(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_asynchronous(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_asynchronous(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_leading_lines(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_leading_lines(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_lpar(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_lpar(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_rpar(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_rpar(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_whitespace_after_with(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_whitespace_after_with(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_whitespace_before_colon(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_whitespace_before_colon(self, node: "With") -> None: - pass - - @mark_no_op - def visit_WithItem(self, node: "WithItem") -> Optional[bool]: - pass - - @mark_no_op - def visit_WithItem_item(self, node: "WithItem") -> None: - pass - - @mark_no_op - def leave_WithItem_item(self, node: "WithItem") -> None: - pass - - @mark_no_op - def visit_WithItem_asname(self, node: "WithItem") -> None: - pass - - @mark_no_op - def leave_WithItem_asname(self, node: "WithItem") -> None: - pass - - @mark_no_op - def visit_WithItem_comma(self, node: "WithItem") -> None: - pass - - @mark_no_op - def leave_WithItem_comma(self, node: "WithItem") -> None: - pass - - @mark_no_op - def visit_Yield(self, node: "Yield") -> Optional[bool]: - pass - - @mark_no_op - def visit_Yield_value(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_value(self, node: "Yield") -> None: - pass - - @mark_no_op - def visit_Yield_lpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_lpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def visit_Yield_rpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_rpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def visit_Yield_whitespace_after_yield(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_whitespace_after_yield(self, node: "Yield") -> None: - pass - - -class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): - @mark_no_op - def leave_Add(self, original_node: "Add") -> None: - pass - - @mark_no_op - def leave_AddAssign(self, original_node: "AddAssign") -> None: - pass - - @mark_no_op - def leave_And(self, 
original_node: "And") -> None: - pass - - @mark_no_op - def leave_AnnAssign(self, original_node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_Annotation(self, original_node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Arg(self, original_node: "Arg") -> None: - pass - - @mark_no_op - def leave_AsName(self, original_node: "AsName") -> None: - pass - - @mark_no_op - def leave_Assert(self, original_node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assign(self, original_node: "Assign") -> None: - pass - - @mark_no_op - def leave_AssignEqual(self, original_node: "AssignEqual") -> None: - pass - - @mark_no_op - def leave_AssignTarget(self, original_node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_Asynchronous(self, original_node: "Asynchronous") -> None: - pass - - @mark_no_op - def leave_Attribute(self, original_node: "Attribute") -> None: - pass - - @mark_no_op - def leave_AugAssign(self, original_node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_Await(self, original_node: "Await") -> None: - pass - - @mark_no_op - def leave_BinaryOperation(self, original_node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BitAnd(self, original_node: "BitAnd") -> None: - pass - - @mark_no_op - def leave_BitAndAssign(self, original_node: "BitAndAssign") -> None: - pass - - @mark_no_op - def leave_BitInvert(self, original_node: "BitInvert") -> None: - pass - - @mark_no_op - def leave_BitOr(self, original_node: "BitOr") -> None: - pass - - @mark_no_op - def leave_BitOrAssign(self, original_node: "BitOrAssign") -> None: - pass - - @mark_no_op - def leave_BitXor(self, original_node: "BitXor") -> None: - pass - - @mark_no_op - def leave_BitXorAssign(self, original_node: "BitXorAssign") -> None: - pass - - @mark_no_op - def leave_BooleanOperation(self, original_node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_Break(self, original_node: "Break") -> None: - pass - - @mark_no_op - def leave_Call(self, original_node: "Call") -> None: - pass - - @mark_no_op - def leave_ClassDef(self, original_node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_Colon(self, original_node: "Colon") -> None: - pass - - @mark_no_op - def leave_Comma(self, original_node: "Comma") -> None: - pass - - @mark_no_op - def leave_Comment(self, original_node: "Comment") -> None: - pass - - @mark_no_op - def leave_CompFor(self, original_node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompIf(self, original_node: "CompIf") -> None: - pass - - @mark_no_op - def leave_Comparison(self, original_node: "Comparison") -> None: - pass - - @mark_no_op - def leave_ComparisonTarget(self, original_node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString(self, original_node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_Continue(self, original_node: "Continue") -> None: - pass - - @mark_no_op - def leave_Decorator(self, original_node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Del(self, original_node: "Del") -> None: - pass - - @mark_no_op - def leave_Dict(self, original_node: "Dict") -> None: - pass - - @mark_no_op - def leave_DictComp(self, original_node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictElement(self, original_node: "DictElement") -> None: - pass - - @mark_no_op - def leave_Divide(self, original_node: "Divide") -> None: - pass - - @mark_no_op - def leave_DivideAssign(self, original_node: "DivideAssign") -> None: - pass - - @mark_no_op - def 
leave_Dot(self, original_node: "Dot") -> None: - pass - - @mark_no_op - def leave_Element(self, original_node: "Element") -> None: - pass - - @mark_no_op - def leave_Ellipsis(self, original_node: "Ellipsis") -> None: - pass - - @mark_no_op - def leave_Else(self, original_node: "Else") -> None: - pass - - @mark_no_op - def leave_EmptyLine(self, original_node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_Equal(self, original_node: "Equal") -> None: - pass - - @mark_no_op - def leave_ExceptHandler(self, original_node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler(self, original_node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_Expr(self, original_node: "Expr") -> None: - pass - - @mark_no_op - def leave_Finally(self, original_node: "Finally") -> None: - pass - - @mark_no_op - def leave_Float(self, original_node: "Float") -> None: - pass - - @mark_no_op - def leave_FloorDivide(self, original_node: "FloorDivide") -> None: - pass - - @mark_no_op - def leave_FloorDivideAssign(self, original_node: "FloorDivideAssign") -> None: - pass - - @mark_no_op - def leave_For(self, original_node: "For") -> None: - pass - - @mark_no_op - def leave_FormattedString(self, original_node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression( - self, original_node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringText(self, original_node: "FormattedStringText") -> None: - pass - - @mark_no_op - def leave_From(self, original_node: "From") -> None: - pass - - @mark_no_op - def leave_FunctionDef(self, original_node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_GeneratorExp(self, original_node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_Global(self, original_node: "Global") -> None: - pass - - @mark_no_op - def leave_GreaterThan(self, original_node: "GreaterThan") -> None: - pass - - @mark_no_op - def leave_GreaterThanEqual(self, original_node: "GreaterThanEqual") -> None: - pass - - @mark_no_op - def leave_If(self, original_node: "If") -> None: - pass - - @mark_no_op - def leave_IfExp(self, original_node: "IfExp") -> None: - pass - - @mark_no_op - def leave_Imaginary(self, original_node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Import(self, original_node: "Import") -> None: - pass - - @mark_no_op - def leave_ImportAlias(self, original_node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportFrom(self, original_node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportStar(self, original_node: "ImportStar") -> None: - pass - - @mark_no_op - def leave_In(self, original_node: "In") -> None: - pass - - @mark_no_op - def leave_IndentedBlock(self, original_node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_Index(self, original_node: "Index") -> None: - pass - - @mark_no_op - def leave_Integer(self, original_node: "Integer") -> None: - pass - - @mark_no_op - def leave_Is(self, original_node: "Is") -> None: - pass - - @mark_no_op - def leave_IsNot(self, original_node: "IsNot") -> None: - pass - - @mark_no_op - def leave_Lambda(self, original_node: "Lambda") -> None: - pass - - @mark_no_op - def leave_LeftCurlyBrace(self, original_node: "LeftCurlyBrace") -> None: - pass - - @mark_no_op - def leave_LeftParen(self, original_node: "LeftParen") -> None: - pass - - @mark_no_op - def leave_LeftShift(self, original_node: "LeftShift") -> None: - pass - - @mark_no_op - def leave_LeftShiftAssign(self, 
original_node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def leave_LeftSquareBracket(self, original_node: "LeftSquareBracket") -> None: - pass - - @mark_no_op - def leave_LessThan(self, original_node: "LessThan") -> None: - pass - - @mark_no_op - def leave_LessThanEqual(self, original_node: "LessThanEqual") -> None: - pass - - @mark_no_op - def leave_List(self, original_node: "List") -> None: - pass - - @mark_no_op - def leave_ListComp(self, original_node: "ListComp") -> None: - pass - - @mark_no_op - def leave_Match(self, original_node: "Match") -> None: - pass - - @mark_no_op - def leave_MatchAs(self, original_node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchCase(self, original_node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchClass(self, original_node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement(self, original_node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchList(self, original_node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchMapping(self, original_node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement(self, original_node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchOr(self, original_node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOrElement(self, original_node: "MatchOrElement") -> None: - pass - - @mark_no_op - def leave_MatchPattern(self, original_node: "MatchPattern") -> None: - pass - - @mark_no_op - def leave_MatchSequence(self, original_node: "MatchSequence") -> None: - pass - - @mark_no_op - def leave_MatchSequenceElement(self, original_node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def leave_MatchSingleton(self, original_node: "MatchSingleton") -> None: - pass - - @mark_no_op - def leave_MatchStar(self, original_node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchTuple(self, original_node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchValue(self, original_node: "MatchValue") -> None: - pass - - @mark_no_op - def leave_MatrixMultiply(self, original_node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def leave_MatrixMultiplyAssign(self, original_node: "MatrixMultiplyAssign") -> None: - pass - - @mark_no_op - def leave_Minus(self, original_node: "Minus") -> None: - pass - - @mark_no_op - def leave_Module(self, original_node: "Module") -> None: - pass - - @mark_no_op - def leave_Modulo(self, original_node: "Modulo") -> None: - pass - - @mark_no_op - def leave_ModuloAssign(self, original_node: "ModuloAssign") -> None: - pass - - @mark_no_op - def leave_Multiply(self, original_node: "Multiply") -> None: - pass - - @mark_no_op - def leave_MultiplyAssign(self, original_node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def leave_Name(self, original_node: "Name") -> None: - pass - - @mark_no_op - def leave_NameItem(self, original_node: "NameItem") -> None: - pass - - @mark_no_op - def leave_NamedExpr(self, original_node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_Newline(self, original_node: "Newline") -> None: - pass - - @mark_no_op - def leave_Nonlocal(self, original_node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Not(self, original_node: "Not") -> None: - pass - - @mark_no_op - def leave_NotEqual(self, original_node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotIn(self, original_node: "NotIn") -> None: - pass - - @mark_no_op - def leave_Or(self, original_node: "Or") -> None: - 
pass - - @mark_no_op - def leave_Param(self, original_node: "Param") -> None: - pass - - @mark_no_op - def leave_ParamSlash(self, original_node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamStar(self, original_node: "ParamStar") -> None: - pass - - @mark_no_op - def leave_Parameters(self, original_node: "Parameters") -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace( - self, original_node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_Pass(self, original_node: "Pass") -> None: - pass - - @mark_no_op - def leave_Plus(self, original_node: "Plus") -> None: - pass - - @mark_no_op - def leave_Power(self, original_node: "Power") -> None: - pass - - @mark_no_op - def leave_PowerAssign(self, original_node: "PowerAssign") -> None: - pass - - @mark_no_op - def leave_Raise(self, original_node: "Raise") -> None: - pass - - @mark_no_op - def leave_Return(self, original_node: "Return") -> None: - pass - - @mark_no_op - def leave_RightCurlyBrace(self, original_node: "RightCurlyBrace") -> None: - pass - - @mark_no_op - def leave_RightParen(self, original_node: "RightParen") -> None: - pass - - @mark_no_op - def leave_RightShift(self, original_node: "RightShift") -> None: - pass - - @mark_no_op - def leave_RightShiftAssign(self, original_node: "RightShiftAssign") -> None: - pass - - @mark_no_op - def leave_RightSquareBracket(self, original_node: "RightSquareBracket") -> None: - pass - - @mark_no_op - def leave_Semicolon(self, original_node: "Semicolon") -> None: - pass - - @mark_no_op - def leave_Set(self, original_node: "Set") -> None: - pass - - @mark_no_op - def leave_SetComp(self, original_node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine(self, original_node: "SimpleStatementLine") -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite(self, original_node: "SimpleStatementSuite") -> None: - pass - - @mark_no_op - def leave_SimpleString(self, original_node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleWhitespace(self, original_node: "SimpleWhitespace") -> None: - pass - - @mark_no_op - def leave_Slice(self, original_node: "Slice") -> None: - pass - - @mark_no_op - def leave_StarredDictElement(self, original_node: "StarredDictElement") -> None: - pass - - @mark_no_op - def leave_StarredElement(self, original_node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_Subscript(self, original_node: "Subscript") -> None: - pass - - @mark_no_op - def leave_SubscriptElement(self, original_node: "SubscriptElement") -> None: - pass - - @mark_no_op - def leave_Subtract(self, original_node: "Subtract") -> None: - pass - - @mark_no_op - def leave_SubtractAssign(self, original_node: "SubtractAssign") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace(self, original_node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_Try(self, original_node: "Try") -> None: - pass - - @mark_no_op - def leave_TryStar(self, original_node: "TryStar") -> None: - pass - - @mark_no_op - def leave_Tuple(self, original_node: "Tuple") -> None: - pass - - @mark_no_op - def leave_UnaryOperation(self, original_node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_While(self, original_node: "While") -> None: - pass - - @mark_no_op - def leave_With(self, original_node: "With") -> None: - pass - - @mark_no_op - def leave_WithItem(self, original_node: "WithItem") -> None: - pass - - @mark_no_op - def leave_Yield(self, original_node: "Yield") -> None: - pass - - -class 
CSTTypedTransformerFunctions(CSTTypedBaseFunctions): - @mark_no_op - def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_AddAssign( - self, original_node: "AddAssign", updated_node: "AddAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_And(self, original_node: "And", updated_node: "And") -> "BaseBooleanOp": - return updated_node - - @mark_no_op - def leave_AnnAssign( - self, original_node: "AnnAssign", updated_node: "AnnAssign" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Annotation( - self, original_node: "Annotation", updated_node: "Annotation" - ) -> "Annotation": - return updated_node - - @mark_no_op - def leave_Arg( - self, original_node: "Arg", updated_node: "Arg" - ) -> Union["Arg", FlattenSentinel["Arg"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_AsName(self, original_node: "AsName", updated_node: "AsName") -> "AsName": - return updated_node - - @mark_no_op - def leave_Assert( - self, original_node: "Assert", updated_node: "Assert" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Assign( - self, original_node: "Assign", updated_node: "Assign" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_AssignEqual( - self, original_node: "AssignEqual", updated_node: "AssignEqual" - ) -> Union["AssignEqual", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_AssignTarget( - self, original_node: "AssignTarget", updated_node: "AssignTarget" - ) -> Union["AssignTarget", FlattenSentinel["AssignTarget"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Asynchronous( - self, original_node: "Asynchronous", updated_node: "Asynchronous" - ) -> "Asynchronous": - return updated_node - - @mark_no_op - def leave_Attribute( - self, original_node: "Attribute", updated_node: "Attribute" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_AugAssign( - self, original_node: "AugAssign", updated_node: "AugAssign" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Await( - self, original_node: "Await", updated_node: "Await" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_BinaryOperation( - self, original_node: "BinaryOperation", updated_node: "BinaryOperation" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_BitAnd( - self, original_node: "BitAnd", updated_node: "BitAnd" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_BitAndAssign( - self, original_node: "BitAndAssign", updated_node: "BitAndAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_BitInvert( - self, original_node: "BitInvert", updated_node: "BitInvert" - ) -> "BaseUnaryOp": - return updated_node - - @mark_no_op - def leave_BitOr( - self, original_node: "BitOr", updated_node: "BitOr" - ) -> Union["BaseBinaryOp", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_BitOrAssign( - self, original_node: "BitOrAssign", updated_node: "BitOrAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_BitXor( - self, original_node: "BitXor", 
updated_node: "BitXor" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_BitXorAssign( - self, original_node: "BitXorAssign", updated_node: "BitXorAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_BooleanOperation( - self, original_node: "BooleanOperation", updated_node: "BooleanOperation" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Break( - self, original_node: "Break", updated_node: "Break" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Call( - self, original_node: "Call", updated_node: "Call" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_ClassDef( - self, original_node: "ClassDef", updated_node: "ClassDef" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Colon( - self, original_node: "Colon", updated_node: "Colon" - ) -> Union["Colon", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Comma( - self, original_node: "Comma", updated_node: "Comma" - ) -> Union["Comma", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Comment( - self, original_node: "Comment", updated_node: "Comment" - ) -> "Comment": - return updated_node - - @mark_no_op - def leave_CompFor( - self, original_node: "CompFor", updated_node: "CompFor" - ) -> "CompFor": - return updated_node - - @mark_no_op - def leave_CompIf(self, original_node: "CompIf", updated_node: "CompIf") -> "CompIf": - return updated_node - - @mark_no_op - def leave_Comparison( - self, original_node: "Comparison", updated_node: "Comparison" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_ComparisonTarget( - self, original_node: "ComparisonTarget", updated_node: "ComparisonTarget" - ) -> Union[ - "ComparisonTarget", FlattenSentinel["ComparisonTarget"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_ConcatenatedString( - self, original_node: "ConcatenatedString", updated_node: "ConcatenatedString" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Continue( - self, original_node: "Continue", updated_node: "Continue" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Decorator( - self, original_node: "Decorator", updated_node: "Decorator" - ) -> Union["Decorator", FlattenSentinel["Decorator"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Del( - self, original_node: "Del", updated_node: "Del" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Dict( - self, original_node: "Dict", updated_node: "Dict" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_DictComp( - self, original_node: "DictComp", updated_node: "DictComp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_DictElement( - self, original_node: "DictElement", updated_node: "DictElement" - ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Divide( - self, original_node: "Divide", updated_node: "Divide" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_DivideAssign( - self, original_node: "DivideAssign", updated_node: "DivideAssign" - ) -> 
"BaseAugOp": - return updated_node - - @mark_no_op - def leave_Dot( - self, original_node: "Dot", updated_node: "Dot" - ) -> Union["Dot", FlattenSentinel["Dot"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Element( - self, original_node: "Element", updated_node: "Element" - ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Ellipsis( - self, original_node: "Ellipsis", updated_node: "Ellipsis" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Else(self, original_node: "Else", updated_node: "Else") -> "Else": - return updated_node - - @mark_no_op - def leave_EmptyLine( - self, original_node: "EmptyLine", updated_node: "EmptyLine" - ) -> Union["EmptyLine", FlattenSentinel["EmptyLine"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Equal( - self, original_node: "Equal", updated_node: "Equal" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_ExceptHandler( - self, original_node: "ExceptHandler", updated_node: "ExceptHandler" - ) -> Union["ExceptHandler", FlattenSentinel["ExceptHandler"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_ExceptStarHandler( - self, original_node: "ExceptStarHandler", updated_node: "ExceptStarHandler" - ) -> Union[ - "ExceptStarHandler", FlattenSentinel["ExceptStarHandler"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Expr( - self, original_node: "Expr", updated_node: "Expr" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Finally( - self, original_node: "Finally", updated_node: "Finally" - ) -> "Finally": - return updated_node - - @mark_no_op - def leave_Float( - self, original_node: "Float", updated_node: "Float" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_FloorDivide( - self, original_node: "FloorDivide", updated_node: "FloorDivide" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_FloorDivideAssign( - self, original_node: "FloorDivideAssign", updated_node: "FloorDivideAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_For( - self, original_node: "For", updated_node: "For" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_FormattedString( - self, original_node: "FormattedString", updated_node: "FormattedString" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_FormattedStringExpression( - self, - original_node: "FormattedStringExpression", - updated_node: "FormattedStringExpression", - ) -> Union[ - "BaseFormattedStringContent", - FlattenSentinel["BaseFormattedStringContent"], - RemovalSentinel, - ]: - return updated_node - - @mark_no_op - def leave_FormattedStringText( - self, original_node: "FormattedStringText", updated_node: "FormattedStringText" - ) -> Union[ - "BaseFormattedStringContent", - FlattenSentinel["BaseFormattedStringContent"], - RemovalSentinel, - ]: - return updated_node - - @mark_no_op - def leave_From(self, original_node: "From", updated_node: "From") -> "From": - return updated_node - - @mark_no_op - def leave_FunctionDef( - self, original_node: "FunctionDef", updated_node: "FunctionDef" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_GeneratorExp( - self, 
original_node: "GeneratorExp", updated_node: "GeneratorExp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Global( - self, original_node: "Global", updated_node: "Global" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_GreaterThan( - self, original_node: "GreaterThan", updated_node: "GreaterThan" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_GreaterThanEqual( - self, original_node: "GreaterThanEqual", updated_node: "GreaterThanEqual" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_If( - self, original_node: "If", updated_node: "If" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_IfExp( - self, original_node: "IfExp", updated_node: "IfExp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Imaginary( - self, original_node: "Imaginary", updated_node: "Imaginary" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Import( - self, original_node: "Import", updated_node: "Import" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_ImportAlias( - self, original_node: "ImportAlias", updated_node: "ImportAlias" - ) -> Union["ImportAlias", FlattenSentinel["ImportAlias"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_ImportFrom( - self, original_node: "ImportFrom", updated_node: "ImportFrom" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_ImportStar( - self, original_node: "ImportStar", updated_node: "ImportStar" - ) -> "ImportStar": - return updated_node - - @mark_no_op - def leave_In(self, original_node: "In", updated_node: "In") -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_IndentedBlock( - self, original_node: "IndentedBlock", updated_node: "IndentedBlock" - ) -> "BaseSuite": - return updated_node - - @mark_no_op - def leave_Index(self, original_node: "Index", updated_node: "Index") -> "BaseSlice": - return updated_node - - @mark_no_op - def leave_Integer( - self, original_node: "Integer", updated_node: "Integer" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Is(self, original_node: "Is", updated_node: "Is") -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_IsNot( - self, original_node: "IsNot", updated_node: "IsNot" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_Lambda( - self, original_node: "Lambda", updated_node: "Lambda" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_LeftCurlyBrace( - self, original_node: "LeftCurlyBrace", updated_node: "LeftCurlyBrace" - ) -> "LeftCurlyBrace": - return updated_node - - @mark_no_op - def leave_LeftParen( - self, original_node: "LeftParen", updated_node: "LeftParen" - ) -> Union[ - "LeftParen", MaybeSentinel, FlattenSentinel["LeftParen"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_LeftShift( - self, original_node: "LeftShift", updated_node: "LeftShift" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_LeftShiftAssign( - self, original_node: "LeftShiftAssign", updated_node: "LeftShiftAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_LeftSquareBracket( 
- self, original_node: "LeftSquareBracket", updated_node: "LeftSquareBracket" - ) -> "LeftSquareBracket": - return updated_node - - @mark_no_op - def leave_LessThan( - self, original_node: "LessThan", updated_node: "LessThan" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_LessThanEqual( - self, original_node: "LessThanEqual", updated_node: "LessThanEqual" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_List( - self, original_node: "List", updated_node: "List" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_ListComp( - self, original_node: "ListComp", updated_node: "ListComp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Match( - self, original_node: "Match", updated_node: "Match" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_MatchAs( - self, original_node: "MatchAs", updated_node: "MatchAs" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchCase( - self, original_node: "MatchCase", updated_node: "MatchCase" - ) -> "MatchCase": - return updated_node - - @mark_no_op - def leave_MatchClass( - self, original_node: "MatchClass", updated_node: "MatchClass" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchKeywordElement( - self, original_node: "MatchKeywordElement", updated_node: "MatchKeywordElement" - ) -> Union[ - "MatchKeywordElement", FlattenSentinel["MatchKeywordElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_MatchList( - self, original_node: "MatchList", updated_node: "MatchList" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchMapping( - self, original_node: "MatchMapping", updated_node: "MatchMapping" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchMappingElement( - self, original_node: "MatchMappingElement", updated_node: "MatchMappingElement" - ) -> Union[ - "MatchMappingElement", FlattenSentinel["MatchMappingElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_MatchOr( - self, original_node: "MatchOr", updated_node: "MatchOr" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchOrElement( - self, original_node: "MatchOrElement", updated_node: "MatchOrElement" - ) -> Union["MatchOrElement", FlattenSentinel["MatchOrElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_MatchPattern( - self, original_node: "MatchPattern", updated_node: "MatchPattern" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchSequence( - self, original_node: "MatchSequence", updated_node: "MatchSequence" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchSequenceElement( - self, - original_node: "MatchSequenceElement", - updated_node: "MatchSequenceElement", - ) -> Union[ - "MatchSequenceElement", FlattenSentinel["MatchSequenceElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_MatchSingleton( - self, original_node: "MatchSingleton", updated_node: "MatchSingleton" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchStar( - self, original_node: "MatchStar", updated_node: "MatchStar" - ) -> "MatchStar": - return updated_node - - @mark_no_op - def leave_MatchTuple( - self, original_node: "MatchTuple", updated_node: "MatchTuple" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def 
leave_MatchValue(
-        self, original_node: "MatchValue", updated_node: "MatchValue"
-    ) -> "MatchPattern":
-        return updated_node
-
-    @mark_no_op
-    def leave_MatrixMultiply(
-        self, original_node: "MatrixMultiply", updated_node: "MatrixMultiply"
-    ) -> "BaseBinaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_MatrixMultiplyAssign(
-        self,
-        original_node: "MatrixMultiplyAssign",
-        updated_node: "MatrixMultiplyAssign",
-    ) -> "BaseAugOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Minus(
-        self, original_node: "Minus", updated_node: "Minus"
-    ) -> "BaseUnaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Module(self, original_node: "Module", updated_node: "Module") -> "Module":
-        return updated_node
-
-    @mark_no_op
-    def leave_Modulo(
-        self, original_node: "Modulo", updated_node: "Modulo"
-    ) -> "BaseBinaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_ModuloAssign(
-        self, original_node: "ModuloAssign", updated_node: "ModuloAssign"
-    ) -> "BaseAugOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Multiply(
-        self, original_node: "Multiply", updated_node: "Multiply"
-    ) -> "BaseBinaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_MultiplyAssign(
-        self, original_node: "MultiplyAssign", updated_node: "MultiplyAssign"
-    ) -> "BaseAugOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Name(
-        self, original_node: "Name", updated_node: "Name"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_NameItem(
-        self, original_node: "NameItem", updated_node: "NameItem"
-    ) -> Union["NameItem", FlattenSentinel["NameItem"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_NamedExpr(
-        self, original_node: "NamedExpr", updated_node: "NamedExpr"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_Newline(
-        self, original_node: "Newline", updated_node: "Newline"
-    ) -> "Newline":
-        return updated_node
-
-    @mark_no_op
-    def leave_Nonlocal(
-        self, original_node: "Nonlocal", updated_node: "Nonlocal"
-    ) -> Union[
-        "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel
-    ]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Not(self, original_node: "Not", updated_node: "Not") -> "BaseUnaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_NotEqual(
-        self, original_node: "NotEqual", updated_node: "NotEqual"
-    ) -> "BaseCompOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_NotIn(
-        self, original_node: "NotIn", updated_node: "NotIn"
-    ) -> "BaseCompOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Or(self, original_node: "Or", updated_node: "Or") -> "BaseBooleanOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Param(
-        self, original_node: "Param", updated_node: "Param"
-    ) -> Union["Param", MaybeSentinel, FlattenSentinel["Param"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_ParamSlash(
-        self, original_node: "ParamSlash", updated_node: "ParamSlash"
-    ) -> Union["ParamSlash", MaybeSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_ParamStar(
-        self, original_node: "ParamStar", updated_node: "ParamStar"
-    ) -> Union["ParamStar", MaybeSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Parameters(
-        self, original_node: "Parameters", updated_node: "Parameters"
-    ) -> "Parameters":
-        return updated_node
-
-    @mark_no_op
-    def leave_ParenthesizedWhitespace(
-        self,
-        original_node: "ParenthesizedWhitespace",
-        updated_node: "ParenthesizedWhitespace",
-    ) -> Union["BaseParenthesizableWhitespace", MaybeSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Pass(
-        self, original_node: "Pass", updated_node: "Pass"
-    ) -> Union[
-        "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel
-    ]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Plus(self, original_node: "Plus", updated_node: "Plus") -> "BaseUnaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Power(
-        self, original_node: "Power", updated_node: "Power"
-    ) -> "BaseBinaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_PowerAssign(
-        self, original_node: "PowerAssign", updated_node: "PowerAssign"
-    ) -> "BaseAugOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_Raise(
-        self, original_node: "Raise", updated_node: "Raise"
-    ) -> Union[
-        "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel
-    ]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Return(
-        self, original_node: "Return", updated_node: "Return"
-    ) -> Union[
-        "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel
-    ]:
-        return updated_node
-
-    @mark_no_op
-    def leave_RightCurlyBrace(
-        self, original_node: "RightCurlyBrace", updated_node: "RightCurlyBrace"
-    ) -> "RightCurlyBrace":
-        return updated_node
-
-    @mark_no_op
-    def leave_RightParen(
-        self, original_node: "RightParen", updated_node: "RightParen"
-    ) -> Union[
-        "RightParen", MaybeSentinel, FlattenSentinel["RightParen"], RemovalSentinel
-    ]:
-        return updated_node
-
-    @mark_no_op
-    def leave_RightShift(
-        self, original_node: "RightShift", updated_node: "RightShift"
-    ) -> "BaseBinaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_RightShiftAssign(
-        self, original_node: "RightShiftAssign", updated_node: "RightShiftAssign"
-    ) -> "BaseAugOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_RightSquareBracket(
-        self, original_node: "RightSquareBracket", updated_node: "RightSquareBracket"
-    ) -> "RightSquareBracket":
-        return updated_node
-
-    @mark_no_op
-    def leave_Semicolon(
-        self, original_node: "Semicolon", updated_node: "Semicolon"
-    ) -> Union["Semicolon", MaybeSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Set(self, original_node: "Set", updated_node: "Set") -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_SetComp(
-        self, original_node: "SetComp", updated_node: "SetComp"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_SimpleStatementLine(
-        self, original_node: "SimpleStatementLine", updated_node: "SimpleStatementLine"
-    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_SimpleStatementSuite(
-        self,
-        original_node: "SimpleStatementSuite",
-        updated_node: "SimpleStatementSuite",
-    ) -> "BaseSuite":
-        return updated_node
-
-    @mark_no_op
-    def leave_SimpleString(
-        self, original_node: "SimpleString", updated_node: "SimpleString"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_SimpleWhitespace(
-        self, original_node: "SimpleWhitespace", updated_node: "SimpleWhitespace"
-    ) -> Union["BaseParenthesizableWhitespace", MaybeSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Slice(self, original_node: "Slice", updated_node: "Slice") -> "BaseSlice":
-        return updated_node
-
-    @mark_no_op
-    def leave_StarredDictElement(
-        self, original_node: "StarredDictElement", updated_node: "StarredDictElement"
-    ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_StarredElement(
-        self, original_node: "StarredElement", updated_node: "StarredElement"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_Subscript(
-        self, original_node: "Subscript", updated_node: "Subscript"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_SubscriptElement(
-        self, original_node: "SubscriptElement", updated_node: "SubscriptElement"
-    ) -> Union[
-        "SubscriptElement", FlattenSentinel["SubscriptElement"], RemovalSentinel
-    ]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Subtract(
-        self, original_node: "Subtract", updated_node: "Subtract"
-    ) -> "BaseBinaryOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_SubtractAssign(
-        self, original_node: "SubtractAssign", updated_node: "SubtractAssign"
-    ) -> "BaseAugOp":
-        return updated_node
-
-    @mark_no_op
-    def leave_TrailingWhitespace(
-        self, original_node: "TrailingWhitespace", updated_node: "TrailingWhitespace"
-    ) -> "TrailingWhitespace":
-        return updated_node
-
-    @mark_no_op
-    def leave_Try(
-        self, original_node: "Try", updated_node: "Try"
-    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_TryStar(
-        self, original_node: "TryStar", updated_node: "TryStar"
-    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Tuple(
-        self, original_node: "Tuple", updated_node: "Tuple"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_UnaryOperation(
-        self, original_node: "UnaryOperation", updated_node: "UnaryOperation"
-    ) -> "BaseExpression":
-        return updated_node
-
-    @mark_no_op
-    def leave_While(
-        self, original_node: "While", updated_node: "While"
-    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_With(
-        self, original_node: "With", updated_node: "With"
-    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_WithItem(
-        self, original_node: "WithItem", updated_node: "WithItem"
-    ) -> Union["WithItem", FlattenSentinel["WithItem"], RemovalSentinel]:
-        return updated_node
-
-    @mark_no_op
-    def leave_Yield(
-        self, original_node: "Yield", updated_node: "Yield"
-    ) -> "BaseExpression":
-        return updated_node
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+
+# This file was generated by libcst.codegen.gen_matcher_classes
+from typing import Optional, TYPE_CHECKING, Union
+
+from libcst._flatten_sentinel import FlattenSentinel
+from libcst._maybe_sentinel import MaybeSentinel
+from libcst._removal_sentinel import RemovalSentinel
+from libcst._typed_visitor_base import mark_no_op
+
+
+if TYPE_CHECKING:
+    from libcst._nodes.expression import (  # noqa: F401
+        Annotation,
+        Arg,
+        Asynchronous,
+        Attribute,
+        Await,
+        BaseDictElement,
+        BaseElement,
+        BaseExpression,
+        BaseFormattedStringContent,
+        BaseSlice,
+        BinaryOperation,
+        BooleanOperation,
+        Call,
+        Comparison,
+        ComparisonTarget,
+        CompFor,
+        CompIf,
+        ConcatenatedString,
+        Dict,
+        DictComp,
+        DictElement,
+        Element,
+        Ellipsis,
+        Float,
+        FormattedString,
+        FormattedStringExpression,
+        FormattedStringText,
+        From,
+        GeneratorExp,
+        IfExp,
+        Imaginary,
+        Index,
+        Integer,
+        Lambda,
+        LeftCurlyBrace,
+        LeftParen,
+        LeftSquareBracket,
+        List,
+        ListComp,
+        Name,
+        NamedExpr,
+        Param,
+        Parameters,
+        ParamSlash,
+        ParamStar,
+        RightCurlyBrace,
+        RightParen,
+        RightSquareBracket,
+        Set,
+        SetComp,
+        SimpleString,
+        Slice,
+        StarredDictElement,
+        StarredElement,
+        Subscript,
+        SubscriptElement,
+        Tuple,
+        UnaryOperation,
+        Yield,
+    )
+    from libcst._nodes.module import Module  # noqa: F401
+    from libcst._nodes.op import (  # noqa: F401
+        Add,
+        AddAssign,
+        And,
+        AssignEqual,
+        BaseAugOp,
+        BaseBinaryOp,
+        BaseBooleanOp,
+        BaseCompOp,
+        BaseUnaryOp,
+        BitAnd,
+        BitAndAssign,
+        BitInvert,
+        BitOr,
+        BitOrAssign,
+        BitXor,
+        BitXorAssign,
+        Colon,
+        Comma,
+        Divide,
+        DivideAssign,
+        Dot,
+        Equal,
+        FloorDivide,
+        FloorDivideAssign,
+        GreaterThan,
+        GreaterThanEqual,
+        ImportStar,
+        In,
+        Is,
+        IsNot,
+        LeftShift,
+        LeftShiftAssign,
+        LessThan,
+        LessThanEqual,
+        MatrixMultiply,
+        MatrixMultiplyAssign,
+        Minus,
+        Modulo,
+        ModuloAssign,
+        Multiply,
+        MultiplyAssign,
+        Not,
+        NotEqual,
+        NotIn,
+        Or,
+        Plus,
+        Power,
+        PowerAssign,
+        RightShift,
+        RightShiftAssign,
+        Semicolon,
+        Subtract,
+        SubtractAssign,
+    )
+    from libcst._nodes.statement import (  # noqa: F401
+        AnnAssign,
+        AsName,
+        Assert,
+        Assign,
+        AssignTarget,
+        AugAssign,
+        BaseSmallStatement,
+        BaseStatement,
+        BaseSuite,
+        Break,
+        ClassDef,
+        Continue,
+        Decorator,
+        Del,
+        Else,
+        ExceptHandler,
+        ExceptStarHandler,
+        Expr,
+        Finally,
+        For,
+        FunctionDef,
+        Global,
+        If,
+        Import,
+        ImportAlias,
+        ImportFrom,
+        IndentedBlock,
+        Match,
+        MatchAs,
+        MatchCase,
+        MatchClass,
+        MatchKeywordElement,
+        MatchList,
+        MatchMapping,
+        MatchMappingElement,
+        MatchOr,
+        MatchOrElement,
+        MatchPattern,
+        MatchSequence,
+        MatchSequenceElement,
+        MatchSingleton,
+        MatchStar,
+        MatchTuple,
+        MatchValue,
+        NameItem,
+        Nonlocal,
+        Pass,
+        Raise,
+        Return,
+        SimpleStatementLine,
+        SimpleStatementSuite,
+        Try,
+        TryStar,
+        While,
+        With,
+        WithItem,
+    )
+    from libcst._nodes.whitespace import (  # noqa: F401
+        BaseParenthesizableWhitespace,
+        Comment,
+        EmptyLine,
+        Newline,
+        ParenthesizedWhitespace,
+        SimpleWhitespace,
+        TrailingWhitespace,
+    )
+
+
+class CSTTypedBaseFunctions:
+    @mark_no_op
+    def visit_Add(self, node: "Add") -> Optional[bool]:
+        pass
+
+    @mark_no_op
+    def visit_Add_whitespace_before(self, node: "Add") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Add_whitespace_before(self, node: "Add") -> None:
+        pass
+
+    @mark_no_op
+    def visit_Add_whitespace_after(self, node: "Add") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Add_whitespace_after(self, node: "Add") -> None:
+        pass
+
+    @mark_no_op
+    def visit_AddAssign(self, node: "AddAssign") ->
Optional[bool]: + pass + + @mark_no_op + def visit_AddAssign_whitespace_before(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def leave_AddAssign_whitespace_before(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def visit_AddAssign_whitespace_after(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def leave_AddAssign_whitespace_after(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def visit_And(self, node: "And") -> Optional[bool]: + pass + + @mark_no_op + def visit_And_whitespace_before(self, node: "And") -> None: + pass + + @mark_no_op + def leave_And_whitespace_before(self, node: "And") -> None: + pass + + @mark_no_op + def visit_And_whitespace_after(self, node: "And") -> None: + pass + + @mark_no_op + def leave_And_whitespace_after(self, node: "And") -> None: + pass + + @mark_no_op + def visit_AnnAssign(self, node: "AnnAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_AnnAssign_target(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_target(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_annotation(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_annotation(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_value(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_value(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_equal(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_equal(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_semicolon(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_semicolon(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_Annotation(self, node: "Annotation") -> Optional[bool]: + pass + + @mark_no_op + def visit_Annotation_annotation(self, node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Annotation_annotation(self, node: "Annotation") -> None: + pass + + @mark_no_op + def visit_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def visit_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def visit_Arg(self, node: "Arg") -> Optional[bool]: + pass + + @mark_no_op + def visit_Arg_value(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_value(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_keyword(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_keyword(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_equal(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_equal(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_comma(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_comma(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_whitespace_after_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_whitespace_after_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_whitespace_after_arg(self, node: "Arg") -> 
None: + pass + + @mark_no_op + def leave_Arg_whitespace_after_arg(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_AsName(self, node: "AsName") -> Optional[bool]: + pass + + @mark_no_op + def visit_AsName_name(self, node: "AsName") -> None: + pass + + @mark_no_op + def leave_AsName_name(self, node: "AsName") -> None: + pass + + @mark_no_op + def visit_AsName_whitespace_before_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def leave_AsName_whitespace_before_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def visit_AsName_whitespace_after_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def leave_AsName_whitespace_after_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def visit_Assert(self, node: "Assert") -> Optional[bool]: + pass + + @mark_no_op + def visit_Assert_test(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_test(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_msg(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_msg(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_comma(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_comma(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_whitespace_after_assert(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_whitespace_after_assert(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_semicolon(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_semicolon(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assign(self, node: "Assign") -> Optional[bool]: + pass + + @mark_no_op + def visit_Assign_targets(self, node: "Assign") -> None: + pass + + @mark_no_op + def leave_Assign_targets(self, node: "Assign") -> None: + pass + + @mark_no_op + def visit_Assign_value(self, node: "Assign") -> None: + pass + + @mark_no_op + def leave_Assign_value(self, node: "Assign") -> None: + pass + + @mark_no_op + def visit_Assign_semicolon(self, node: "Assign") -> None: + pass + + @mark_no_op + def leave_Assign_semicolon(self, node: "Assign") -> None: + pass + + @mark_no_op + def visit_AssignEqual(self, node: "AssignEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def leave_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def visit_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def leave_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def visit_AssignTarget(self, node: "AssignTarget") -> Optional[bool]: + pass + + @mark_no_op + def visit_AssignTarget_target(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_AssignTarget_target(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def visit_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def visit_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def visit_Asynchronous(self, node: "Asynchronous") -> Optional[bool]: + pass + + @mark_no_op + def 
visit_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: + pass + + @mark_no_op + def leave_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: + pass + + @mark_no_op + def visit_Attribute(self, node: "Attribute") -> Optional[bool]: + pass + + @mark_no_op + def visit_Attribute_value(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_value(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_attr(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_attr(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_dot(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_dot(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_lpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_lpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_rpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_rpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_AugAssign(self, node: "AugAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_AugAssign_target(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_target(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_AugAssign_operator(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_operator(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_AugAssign_value(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_value(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_AugAssign_semicolon(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_semicolon(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_Await(self, node: "Await") -> Optional[bool]: + pass + + @mark_no_op + def visit_Await_expression(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_expression(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_Await_lpar(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_lpar(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_Await_rpar(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_rpar(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_Await_whitespace_after_await(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_whitespace_after_await(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_BinaryOperation(self, node: "BinaryOperation") -> Optional[bool]: + pass + + @mark_no_op + def visit_BinaryOperation_left(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_left(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_operator(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_operator(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_right(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_right(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def 
visit_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BitAnd(self, node: "BitAnd") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitAnd_whitespace_before(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def leave_BitAnd_whitespace_before(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def visit_BitAnd_whitespace_after(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def leave_BitAnd_whitespace_after(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def visit_BitAndAssign(self, node: "BitAndAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def leave_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def visit_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def leave_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def visit_BitInvert(self, node: "BitInvert") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitInvert_whitespace_after(self, node: "BitInvert") -> None: + pass + + @mark_no_op + def leave_BitInvert_whitespace_after(self, node: "BitInvert") -> None: + pass + + @mark_no_op + def visit_BitOr(self, node: "BitOr") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitOr_whitespace_before(self, node: "BitOr") -> None: + pass + + @mark_no_op + def leave_BitOr_whitespace_before(self, node: "BitOr") -> None: + pass + + @mark_no_op + def visit_BitOr_whitespace_after(self, node: "BitOr") -> None: + pass + + @mark_no_op + def leave_BitOr_whitespace_after(self, node: "BitOr") -> None: + pass + + @mark_no_op + def visit_BitOrAssign(self, node: "BitOrAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def leave_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def visit_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def leave_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def visit_BitXor(self, node: "BitXor") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitXor_whitespace_before(self, node: "BitXor") -> None: + pass + + @mark_no_op + def leave_BitXor_whitespace_before(self, node: "BitXor") -> None: + pass + + @mark_no_op + def visit_BitXor_whitespace_after(self, node: "BitXor") -> None: + pass + + @mark_no_op + def leave_BitXor_whitespace_after(self, node: "BitXor") -> None: + pass + + @mark_no_op + def visit_BitXorAssign(self, node: "BitXorAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def leave_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def visit_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def leave_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def visit_BooleanOperation(self, node: "BooleanOperation") -> Optional[bool]: + pass + + @mark_no_op + def visit_BooleanOperation_left(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_left(self, node: 
"BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_operator(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_operator(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_right(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_right(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_Break(self, node: "Break") -> Optional[bool]: + pass + + @mark_no_op + def visit_Break_semicolon(self, node: "Break") -> None: + pass + + @mark_no_op + def leave_Break_semicolon(self, node: "Break") -> None: + pass + + @mark_no_op + def visit_Call(self, node: "Call") -> Optional[bool]: + pass + + @mark_no_op + def visit_Call_func(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_func(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_args(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_args(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_lpar(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_lpar(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_rpar(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_rpar(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_whitespace_after_func(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_whitespace_after_func(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_whitespace_before_args(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_whitespace_before_args(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_ClassDef(self, node: "ClassDef") -> Optional[bool]: + pass + + @mark_no_op + def visit_ClassDef_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_body(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_body(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_bases(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_bases(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_keywords(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_keywords(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_lpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_lpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_rpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_rpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_leading_lines(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_leading_lines(self, node: "ClassDef") -> None: + pass + + 
@mark_no_op + def visit_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_Colon(self, node: "Colon") -> Optional[bool]: + pass + + @mark_no_op + def visit_Colon_whitespace_before(self, node: "Colon") -> None: + pass + + @mark_no_op + def leave_Colon_whitespace_before(self, node: "Colon") -> None: + pass + + @mark_no_op + def visit_Colon_whitespace_after(self, node: "Colon") -> None: + pass + + @mark_no_op + def leave_Colon_whitespace_after(self, node: "Colon") -> None: + pass + + @mark_no_op + def visit_Comma(self, node: "Comma") -> Optional[bool]: + pass + + @mark_no_op + def visit_Comma_whitespace_before(self, node: "Comma") -> None: + pass + + @mark_no_op + def leave_Comma_whitespace_before(self, node: "Comma") -> None: + pass + + @mark_no_op + def visit_Comma_whitespace_after(self, node: "Comma") -> None: + pass + + @mark_no_op + def leave_Comma_whitespace_after(self, node: "Comma") -> None: + pass + + @mark_no_op + def visit_Comment(self, node: "Comment") -> Optional[bool]: + pass + + @mark_no_op + def visit_Comment_value(self, node: "Comment") -> None: + pass + + @mark_no_op + def leave_Comment_value(self, node: "Comment") -> None: + pass + + @mark_no_op + def visit_CompFor(self, node: "CompFor") -> Optional[bool]: + pass + + @mark_no_op + def visit_CompFor_target(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_target(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_iter(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_iter(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_ifs(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_ifs(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_inner_for_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_inner_for_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_asynchronous(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_asynchronous(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_before(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_before(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_after_for(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_after_for(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_before_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_before_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_after_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def 
leave_CompFor_whitespace_after_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompIf(self, node: "CompIf") -> Optional[bool]: + pass + + @mark_no_op + def visit_CompIf_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def leave_CompIf_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def visit_CompIf_whitespace_before(self, node: "CompIf") -> None: + pass + + @mark_no_op + def leave_CompIf_whitespace_before(self, node: "CompIf") -> None: + pass + + @mark_no_op + def visit_CompIf_whitespace_before_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def leave_CompIf_whitespace_before_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def visit_Comparison(self, node: "Comparison") -> Optional[bool]: + pass + + @mark_no_op + def visit_Comparison_left(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_left(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_Comparison_comparisons(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_comparisons(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_Comparison_lpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_lpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_Comparison_rpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_rpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_ComparisonTarget(self, node: "ComparisonTarget") -> Optional[bool]: + pass + + @mark_no_op + def visit_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def leave_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def visit_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def leave_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString(self, node: "ConcatenatedString") -> Optional[bool]: + pass + + @mark_no_op + def visit_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_whitespace_between( + self, node: "ConcatenatedString" + ) -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_whitespace_between( + self, node: "ConcatenatedString" + ) -> None: + pass + + @mark_no_op + def visit_Continue(self, node: "Continue") -> Optional[bool]: + pass + + @mark_no_op + def visit_Continue_semicolon(self, node: "Continue") -> None: + pass + + @mark_no_op + def leave_Continue_semicolon(self, node: "Continue") -> None: + pass + + @mark_no_op + def visit_Decorator(self, node: "Decorator") -> Optional[bool]: + pass + + @mark_no_op + def 
visit_Decorator_decorator(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_decorator(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Decorator_leading_lines(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_leading_lines(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Decorator_whitespace_after_at(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_whitespace_after_at(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Decorator_trailing_whitespace(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_trailing_whitespace(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Del(self, node: "Del") -> Optional[bool]: + pass + + @mark_no_op + def visit_Del_target(self, node: "Del") -> None: + pass + + @mark_no_op + def leave_Del_target(self, node: "Del") -> None: + pass + + @mark_no_op + def visit_Del_whitespace_after_del(self, node: "Del") -> None: + pass + + @mark_no_op + def leave_Del_whitespace_after_del(self, node: "Del") -> None: + pass + + @mark_no_op + def visit_Del_semicolon(self, node: "Del") -> None: + pass + + @mark_no_op + def leave_Del_semicolon(self, node: "Del") -> None: + pass + + @mark_no_op + def visit_Dict(self, node: "Dict") -> Optional[bool]: + pass + + @mark_no_op + def visit_Dict_elements(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_elements(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_lbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_lbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_rbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_rbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_lpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_lpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_rpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_rpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_DictComp(self, node: "DictComp") -> Optional[bool]: + pass + + @mark_no_op + def visit_DictComp_key(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_key(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_value(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_value(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_for_in(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_for_in(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_lbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_lbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_rbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_rbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_lpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_lpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_rpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_rpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_whitespace_before_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_whitespace_before_colon(self, node: 
"DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictElement(self, node: "DictElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_DictElement_key(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_key(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_value(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_value(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_comma(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_comma(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_Divide(self, node: "Divide") -> Optional[bool]: + pass + + @mark_no_op + def visit_Divide_whitespace_before(self, node: "Divide") -> None: + pass + + @mark_no_op + def leave_Divide_whitespace_before(self, node: "Divide") -> None: + pass + + @mark_no_op + def visit_Divide_whitespace_after(self, node: "Divide") -> None: + pass + + @mark_no_op + def leave_Divide_whitespace_after(self, node: "Divide") -> None: + pass + + @mark_no_op + def visit_DivideAssign(self, node: "DivideAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def leave_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def visit_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def leave_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def visit_Dot(self, node: "Dot") -> Optional[bool]: + pass + + @mark_no_op + def visit_Dot_whitespace_before(self, node: "Dot") -> None: + pass + + @mark_no_op + def leave_Dot_whitespace_before(self, node: "Dot") -> None: + pass + + @mark_no_op + def visit_Dot_whitespace_after(self, node: "Dot") -> None: + pass + + @mark_no_op + def leave_Dot_whitespace_after(self, node: "Dot") -> None: + pass + + @mark_no_op + def visit_Element(self, node: "Element") -> Optional[bool]: + pass + + @mark_no_op + def visit_Element_value(self, node: "Element") -> None: + pass + + @mark_no_op + def leave_Element_value(self, node: "Element") -> None: + pass + + @mark_no_op + def visit_Element_comma(self, node: "Element") -> None: + pass + + @mark_no_op + def leave_Element_comma(self, node: "Element") -> None: + pass + + @mark_no_op + def visit_Ellipsis(self, node: "Ellipsis") -> Optional[bool]: + pass + + @mark_no_op + def visit_Ellipsis_lpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def leave_Ellipsis_lpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def visit_Ellipsis_rpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def leave_Ellipsis_rpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def visit_Else(self, node: "Else") -> Optional[bool]: + pass + + 
@mark_no_op + def visit_Else_body(self, node: "Else") -> None: + pass + + @mark_no_op + def leave_Else_body(self, node: "Else") -> None: + pass + + @mark_no_op + def visit_Else_leading_lines(self, node: "Else") -> None: + pass + + @mark_no_op + def leave_Else_leading_lines(self, node: "Else") -> None: + pass + + @mark_no_op + def visit_Else_whitespace_before_colon(self, node: "Else") -> None: + pass + + @mark_no_op + def leave_Else_whitespace_before_colon(self, node: "Else") -> None: + pass + + @mark_no_op + def visit_EmptyLine(self, node: "EmptyLine") -> Optional[bool]: + pass + + @mark_no_op + def visit_EmptyLine_indent(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_indent(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_EmptyLine_whitespace(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_whitespace(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_EmptyLine_comment(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_comment(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_EmptyLine_newline(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_newline(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_Equal(self, node: "Equal") -> Optional[bool]: + pass + + @mark_no_op + def visit_Equal_whitespace_before(self, node: "Equal") -> None: + pass + + @mark_no_op + def leave_Equal_whitespace_before(self, node: "Equal") -> None: + pass + + @mark_no_op + def visit_Equal_whitespace_after(self, node: "Equal") -> None: + pass + + @mark_no_op + def leave_Equal_whitespace_after(self, node: "Equal") -> None: + pass + + @mark_no_op + def visit_ExceptHandler(self, node: "ExceptHandler") -> Optional[bool]: + pass + + @mark_no_op + def visit_ExceptHandler_body(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_body(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_type(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_type(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_name(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_name(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_whitespace_after_except( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptHandler_whitespace_after_except( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptHandler_whitespace_before_colon( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptHandler_whitespace_before_colon( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler(self, node: "ExceptStarHandler") -> Optional[bool]: + pass + + @mark_no_op + def visit_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: + pass + + 
@mark_no_op + def visit_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_after_except( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_after_except( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_after_star( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_after_star( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_before_colon( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_before_colon( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_Expr(self, node: "Expr") -> Optional[bool]: + pass + + @mark_no_op + def visit_Expr_value(self, node: "Expr") -> None: + pass + + @mark_no_op + def leave_Expr_value(self, node: "Expr") -> None: + pass + + @mark_no_op + def visit_Expr_semicolon(self, node: "Expr") -> None: + pass + + @mark_no_op + def leave_Expr_semicolon(self, node: "Expr") -> None: + pass + + @mark_no_op + def visit_Finally(self, node: "Finally") -> Optional[bool]: + pass + + @mark_no_op + def visit_Finally_body(self, node: "Finally") -> None: + pass + + @mark_no_op + def leave_Finally_body(self, node: "Finally") -> None: + pass + + @mark_no_op + def visit_Finally_leading_lines(self, node: "Finally") -> None: + pass + + @mark_no_op + def leave_Finally_leading_lines(self, node: "Finally") -> None: + pass + + @mark_no_op + def visit_Finally_whitespace_before_colon(self, node: "Finally") -> None: + pass + + @mark_no_op + def leave_Finally_whitespace_before_colon(self, node: "Finally") -> None: + pass + + @mark_no_op + def visit_Float(self, node: "Float") -> Optional[bool]: + pass + + @mark_no_op + def visit_Float_value(self, node: "Float") -> None: + pass + + @mark_no_op + def leave_Float_value(self, node: "Float") -> None: + pass + + @mark_no_op + def visit_Float_lpar(self, node: "Float") -> None: + pass + + @mark_no_op + def leave_Float_lpar(self, node: "Float") -> None: + pass + + @mark_no_op + def visit_Float_rpar(self, node: "Float") -> None: + pass + + @mark_no_op + def leave_Float_rpar(self, node: "Float") -> None: + pass + + @mark_no_op + def visit_FloorDivide(self, node: "FloorDivide") -> Optional[bool]: + pass + + @mark_no_op + def visit_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def leave_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def visit_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def leave_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def visit_FloorDivideAssign(self, node: "FloorDivideAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_FloorDivideAssign_whitespace_before( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def leave_FloorDivideAssign_whitespace_before( + self, node: "FloorDivideAssign" + ) -> None: + pass + + 
@mark_no_op + def visit_FloorDivideAssign_whitespace_after( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def leave_FloorDivideAssign_whitespace_after( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def visit_For(self, node: "For") -> Optional[bool]: + pass + + @mark_no_op + def visit_For_target(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_target(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_iter(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_iter(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_body(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_body(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_orelse(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_orelse(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_asynchronous(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_asynchronous(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_leading_lines(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_leading_lines(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_after_for(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_after_for(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_before_in(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_before_in(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_after_in(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_after_in(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_before_colon(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_before_colon(self, node: "For") -> None: + pass + + @mark_no_op + def visit_FormattedString(self, node: "FormattedString") -> Optional[bool]: + pass + + @mark_no_op + def visit_FormattedString_parts(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_parts(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_start(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_start(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_end(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_end(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_lpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_lpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_rpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_rpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression( + self, node: "FormattedStringExpression" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_FormattedStringExpression_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_conversion( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def 
leave_FormattedStringExpression_conversion( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_format_spec( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_format_spec( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_whitespace_before_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_whitespace_before_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_whitespace_after_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_whitespace_after_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_equal( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_equal( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringText(self, node: "FormattedStringText") -> Optional[bool]: + pass + + @mark_no_op + def visit_FormattedStringText_value(self, node: "FormattedStringText") -> None: + pass + + @mark_no_op + def leave_FormattedStringText_value(self, node: "FormattedStringText") -> None: + pass + + @mark_no_op + def visit_From(self, node: "From") -> Optional[bool]: + pass + + @mark_no_op + def visit_From_item(self, node: "From") -> None: + pass + + @mark_no_op + def leave_From_item(self, node: "From") -> None: + pass + + @mark_no_op + def visit_From_whitespace_before_from(self, node: "From") -> None: + pass + + @mark_no_op + def leave_From_whitespace_before_from(self, node: "From") -> None: + pass + + @mark_no_op + def visit_From_whitespace_after_from(self, node: "From") -> None: + pass + + @mark_no_op + def leave_From_whitespace_after_from(self, node: "From") -> None: + pass + + @mark_no_op + def visit_FunctionDef(self, node: "FunctionDef") -> Optional[bool]: + pass + + @mark_no_op + def visit_FunctionDef_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_body(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_body(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_returns(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_returns(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_lines_after_decorators(self, 
node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_lines_after_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_GeneratorExp(self, node: "GeneratorExp") -> Optional[bool]: + pass + + @mark_no_op + def visit_GeneratorExp_elt(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_elt(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_Global(self, node: "Global") -> Optional[bool]: + pass + + @mark_no_op + def visit_Global_names(self, node: "Global") -> None: + pass + + @mark_no_op + def leave_Global_names(self, node: "Global") -> None: + pass + + @mark_no_op + def visit_Global_whitespace_after_global(self, node: "Global") -> None: + pass + + @mark_no_op + def leave_Global_whitespace_after_global(self, node: "Global") -> None: + pass + + @mark_no_op + def visit_Global_semicolon(self, node: "Global") -> None: + pass + + @mark_no_op + def leave_Global_semicolon(self, node: "Global") -> None: + pass + + @mark_no_op + def visit_GreaterThan(self, node: "GreaterThan") -> Optional[bool]: + pass + + @mark_no_op + def visit_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def leave_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def visit_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def leave_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def visit_GreaterThanEqual(self, node: "GreaterThanEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_GreaterThanEqual_whitespace_before( + self, node: "GreaterThanEqual" + ) -> None: + pass + + @mark_no_op + def leave_GreaterThanEqual_whitespace_before( + self, node: "GreaterThanEqual" + ) -> None: + pass + + @mark_no_op + def visit_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: + pass + + @mark_no_op + def leave_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: + pass + + @mark_no_op + def visit_If(self, node: "If") -> Optional[bool]: + pass + + @mark_no_op + 
def visit_If_test(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_test(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_body(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_body(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_orelse(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_orelse(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_leading_lines(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_leading_lines(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_whitespace_before_test(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_whitespace_before_test(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_whitespace_after_test(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_whitespace_after_test(self, node: "If") -> None: + pass + + @mark_no_op + def visit_IfExp(self, node: "IfExp") -> Optional[bool]: + pass + + @mark_no_op + def visit_IfExp_test(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_test(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_body(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_body(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_orelse(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_orelse(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_lpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_lpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_rpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_rpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_before_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_before_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_after_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_after_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_before_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_before_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_after_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_after_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_Imaginary(self, node: "Imaginary") -> Optional[bool]: + pass + + @mark_no_op + def visit_Imaginary_value(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Imaginary_value(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def visit_Imaginary_lpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Imaginary_lpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def visit_Imaginary_rpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Imaginary_rpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def visit_Import(self, node: "Import") -> Optional[bool]: + pass + + @mark_no_op + def visit_Import_names(self, node: "Import") -> None: + pass + + @mark_no_op + def leave_Import_names(self, node: "Import") -> None: + pass + + @mark_no_op + def visit_Import_semicolon(self, node: "Import") -> None: + pass + + @mark_no_op + def leave_Import_semicolon(self, node: "Import") -> None: + pass + + @mark_no_op + def 
visit_Import_whitespace_after_import(self, node: "Import") -> None: + pass + + @mark_no_op + def leave_Import_whitespace_after_import(self, node: "Import") -> None: + pass + + @mark_no_op + def visit_ImportAlias(self, node: "ImportAlias") -> Optional[bool]: + pass + + @mark_no_op + def visit_ImportAlias_name(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportAlias_name(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def visit_ImportAlias_asname(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportAlias_asname(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def visit_ImportAlias_comma(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportAlias_comma(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def visit_ImportFrom(self, node: "ImportFrom") -> Optional[bool]: + pass + + @mark_no_op + def visit_ImportFrom_module(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_module(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_names(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_names(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_relative(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_relative(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_lpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_lpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_rpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_rpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_semicolon(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_semicolon(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportStar(self, node: "ImportStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_In(self, node: "In") -> Optional[bool]: + pass + + @mark_no_op + def visit_In_whitespace_before(self, node: "In") -> None: + pass + + @mark_no_op + def leave_In_whitespace_before(self, node: "In") -> None: + pass + + @mark_no_op + def visit_In_whitespace_after(self, node: "In") -> None: + pass + + @mark_no_op + def leave_In_whitespace_after(self, node: "In") -> None: + pass + + @mark_no_op + def visit_IndentedBlock(self, node: "IndentedBlock") -> Optional[bool]: + pass + + @mark_no_op + def visit_IndentedBlock_body(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_body(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_IndentedBlock_header(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_header(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op 
+ def visit_IndentedBlock_indent(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_indent(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_IndentedBlock_footer(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_footer(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_Index(self, node: "Index") -> Optional[bool]: + pass + + @mark_no_op + def visit_Index_value(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_value(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Index_star(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_star(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Index_whitespace_after_star(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_whitespace_after_star(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Integer(self, node: "Integer") -> Optional[bool]: + pass + + @mark_no_op + def visit_Integer_value(self, node: "Integer") -> None: + pass + + @mark_no_op + def leave_Integer_value(self, node: "Integer") -> None: + pass + + @mark_no_op + def visit_Integer_lpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def leave_Integer_lpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def visit_Integer_rpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def leave_Integer_rpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def visit_Is(self, node: "Is") -> Optional[bool]: + pass + + @mark_no_op + def visit_Is_whitespace_before(self, node: "Is") -> None: + pass + + @mark_no_op + def leave_Is_whitespace_before(self, node: "Is") -> None: + pass + + @mark_no_op + def visit_Is_whitespace_after(self, node: "Is") -> None: + pass + + @mark_no_op + def leave_Is_whitespace_after(self, node: "Is") -> None: + pass + + @mark_no_op + def visit_IsNot(self, node: "IsNot") -> Optional[bool]: + pass + + @mark_no_op + def visit_IsNot_whitespace_before(self, node: "IsNot") -> None: + pass + + @mark_no_op + def leave_IsNot_whitespace_before(self, node: "IsNot") -> None: + pass + + @mark_no_op + def visit_IsNot_whitespace_between(self, node: "IsNot") -> None: + pass + + @mark_no_op + def leave_IsNot_whitespace_between(self, node: "IsNot") -> None: + pass + + @mark_no_op + def visit_IsNot_whitespace_after(self, node: "IsNot") -> None: + pass + + @mark_no_op + def leave_IsNot_whitespace_after(self, node: "IsNot") -> None: + pass + + @mark_no_op + def visit_Lambda(self, node: "Lambda") -> Optional[bool]: + pass + + @mark_no_op + def visit_Lambda_params(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_params(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_body(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_body(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_colon(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_colon(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_lpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_lpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_rpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_rpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_whitespace_after_lambda(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_whitespace_after_lambda(self, 
node: "Lambda") -> None: + pass + + @mark_no_op + def visit_LeftCurlyBrace(self, node: "LeftCurlyBrace") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: + pass + + @mark_no_op + def leave_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: + pass + + @mark_no_op + def visit_LeftParen(self, node: "LeftParen") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftParen_whitespace_after(self, node: "LeftParen") -> None: + pass + + @mark_no_op + def leave_LeftParen_whitespace_after(self, node: "LeftParen") -> None: + pass + + @mark_no_op + def visit_LeftShift(self, node: "LeftShift") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftShift_whitespace_before(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def leave_LeftShift_whitespace_before(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def visit_LeftShift_whitespace_after(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def leave_LeftShift_whitespace_after(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def visit_LeftShiftAssign(self, node: "LeftShiftAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def leave_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def visit_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def leave_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def visit_LeftSquareBracket(self, node: "LeftSquareBracket") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftSquareBracket_whitespace_after( + self, node: "LeftSquareBracket" + ) -> None: + pass + + @mark_no_op + def leave_LeftSquareBracket_whitespace_after( + self, node: "LeftSquareBracket" + ) -> None: + pass + + @mark_no_op + def visit_LessThan(self, node: "LessThan") -> Optional[bool]: + pass + + @mark_no_op + def visit_LessThan_whitespace_before(self, node: "LessThan") -> None: + pass + + @mark_no_op + def leave_LessThan_whitespace_before(self, node: "LessThan") -> None: + pass + + @mark_no_op + def visit_LessThan_whitespace_after(self, node: "LessThan") -> None: + pass + + @mark_no_op + def leave_LessThan_whitespace_after(self, node: "LessThan") -> None: + pass + + @mark_no_op + def visit_LessThanEqual(self, node: "LessThanEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def leave_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def visit_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def leave_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def visit_List(self, node: "List") -> Optional[bool]: + pass + + @mark_no_op + def visit_List_elements(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_elements(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_lbracket(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_lbracket(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_rbracket(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_rbracket(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_lpar(self, node: 
"List") -> None: + pass + + @mark_no_op + def leave_List_lpar(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_rpar(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_rpar(self, node: "List") -> None: + pass + + @mark_no_op + def visit_ListComp(self, node: "ListComp") -> Optional[bool]: + pass + + @mark_no_op + def visit_ListComp_elt(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_elt(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_for_in(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_for_in(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_lbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_lbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_rbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_rbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_lpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_lpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_rpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_rpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_Match(self, node: "Match") -> Optional[bool]: + pass + + @mark_no_op + def visit_Match_subject(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_subject(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_cases(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_cases(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_leading_lines(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_leading_lines(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_after_match(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_after_match(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_before_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_before_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_after_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_after_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_indent(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_indent(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_footer(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_footer(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_MatchAs(self, node: "MatchAs") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchAs_pattern(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_pattern(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_name(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_name(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_whitespace_after_as(self, node: "MatchAs") 
-> None: + pass + + @mark_no_op + def visit_MatchAs_lpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_lpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_rpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_rpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchCase(self, node: "MatchCase") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchCase_pattern(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_pattern(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_body(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_body(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_guard(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_guard(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_leading_lines(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_leading_lines(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchClass(self, node: "MatchClass") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchClass_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_lpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_lpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_rpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_rpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def 
visit_MatchKeywordElement(self, node: "MatchKeywordElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_whitespace_before_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_whitespace_before_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_whitespace_after_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_whitespace_after_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchList(self, node: "MatchList") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchList_patterns(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_patterns(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_lbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_lbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_rbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_rbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_lpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_lpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_rpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_rpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchMapping(self, node: "MatchMapping") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchMapping_elements(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_elements(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_lbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_lbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_lpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def 
leave_MatchMapping_lpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement(self, node: "MatchMappingElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_whitespace_before_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_whitespace_before_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_whitespace_after_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_whitespace_after_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchOr(self, node: "MatchOr") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchOr_patterns(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_patterns(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOr_lpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_lpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOr_rpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_rpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOrElement(self, node: "MatchOrElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def visit_MatchOrElement_separator(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchOrElement_separator(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def visit_MatchPattern(self, node: "MatchPattern") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequence(self, node: "MatchSequence") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequenceElement( + self, node: "MatchSequenceElement" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def visit_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def visit_MatchSingleton(self, node: "MatchSingleton") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSingleton_value(self, node: "MatchSingleton") -> None: + pass + + @mark_no_op + def leave_MatchSingleton_value(self, 
node: "MatchSingleton") -> None: + pass + + @mark_no_op + def visit_MatchStar(self, node: "MatchStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchStar_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchStar_comma(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_comma(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchTuple(self, node: "MatchTuple") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchTuple_patterns(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_patterns(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchTuple_lpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_lpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchTuple_rpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_rpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchValue(self, node: "MatchValue") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchValue_value(self, node: "MatchValue") -> None: + pass + + @mark_no_op + def leave_MatchValue_value(self, node: "MatchValue") -> None: + pass + + @mark_no_op + def visit_MatrixMultiply(self, node: "MatrixMultiply") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def leave_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def visit_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def leave_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def visit_MatrixMultiplyAssign( + self, node: "MatrixMultiplyAssign" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_MatrixMultiplyAssign_whitespace_before( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def leave_MatrixMultiplyAssign_whitespace_before( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def visit_MatrixMultiplyAssign_whitespace_after( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def leave_MatrixMultiplyAssign_whitespace_after( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def visit_Minus(self, node: "Minus") -> Optional[bool]: + pass + + @mark_no_op + def visit_Minus_whitespace_after(self, node: "Minus") -> None: + pass + + @mark_no_op + def leave_Minus_whitespace_after(self, node: "Minus") -> None: + pass + + @mark_no_op + def visit_Module(self, node: "Module") -> Optional[bool]: + pass + + @mark_no_op + def visit_Module_body(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_body(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_header(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_header(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_footer(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_footer(self, node: "Module") -> None: + pass + + @mark_no_op + def 
visit_Module_encoding(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_encoding(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_default_indent(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_default_indent(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_default_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_default_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_has_trailing_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_has_trailing_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Modulo(self, node: "Modulo") -> Optional[bool]: + pass + + @mark_no_op + def visit_Modulo_whitespace_before(self, node: "Modulo") -> None: + pass + + @mark_no_op + def leave_Modulo_whitespace_before(self, node: "Modulo") -> None: + pass + + @mark_no_op + def visit_Modulo_whitespace_after(self, node: "Modulo") -> None: + pass + + @mark_no_op + def leave_Modulo_whitespace_after(self, node: "Modulo") -> None: + pass + + @mark_no_op + def visit_ModuloAssign(self, node: "ModuloAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def leave_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def visit_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def leave_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def visit_Multiply(self, node: "Multiply") -> Optional[bool]: + pass + + @mark_no_op + def visit_Multiply_whitespace_before(self, node: "Multiply") -> None: + pass + + @mark_no_op + def leave_Multiply_whitespace_before(self, node: "Multiply") -> None: + pass + + @mark_no_op + def visit_Multiply_whitespace_after(self, node: "Multiply") -> None: + pass + + @mark_no_op + def leave_Multiply_whitespace_after(self, node: "Multiply") -> None: + pass + + @mark_no_op + def visit_MultiplyAssign(self, node: "MultiplyAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def leave_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def visit_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def leave_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def visit_Name(self, node: "Name") -> Optional[bool]: + pass + + @mark_no_op + def visit_Name_value(self, node: "Name") -> None: + pass + + @mark_no_op + def leave_Name_value(self, node: "Name") -> None: + pass + + @mark_no_op + def visit_Name_lpar(self, node: "Name") -> None: + pass + + @mark_no_op + def leave_Name_lpar(self, node: "Name") -> None: + pass + + @mark_no_op + def visit_Name_rpar(self, node: "Name") -> None: + pass + + @mark_no_op + def leave_Name_rpar(self, node: "Name") -> None: + pass + + @mark_no_op + def visit_NameItem(self, node: "NameItem") -> Optional[bool]: + pass + + @mark_no_op + def visit_NameItem_name(self, node: "NameItem") -> None: + pass + + @mark_no_op + def leave_NameItem_name(self, node: "NameItem") -> None: + pass + + @mark_no_op + def visit_NameItem_comma(self, node: "NameItem") -> None: + pass + + @mark_no_op + def leave_NameItem_comma(self, node: "NameItem") -> 
None: + pass + + @mark_no_op + def visit_NamedExpr(self, node: "NamedExpr") -> Optional[bool]: + pass + + @mark_no_op + def visit_NamedExpr_target(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_target(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_value(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_value(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_lpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_lpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_rpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_rpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_Newline(self, node: "Newline") -> Optional[bool]: + pass + + @mark_no_op + def visit_Newline_value(self, node: "Newline") -> None: + pass + + @mark_no_op + def leave_Newline_value(self, node: "Newline") -> None: + pass + + @mark_no_op + def visit_Nonlocal(self, node: "Nonlocal") -> Optional[bool]: + pass + + @mark_no_op + def visit_Nonlocal_names(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Nonlocal_names(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def visit_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def visit_Nonlocal_semicolon(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Nonlocal_semicolon(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def visit_Not(self, node: "Not") -> Optional[bool]: + pass + + @mark_no_op + def visit_Not_whitespace_after(self, node: "Not") -> None: + pass + + @mark_no_op + def leave_Not_whitespace_after(self, node: "Not") -> None: + pass + + @mark_no_op + def visit_NotEqual(self, node: "NotEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_NotEqual_value(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def leave_NotEqual_value(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def visit_NotEqual_whitespace_before(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def leave_NotEqual_whitespace_before(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def visit_NotEqual_whitespace_after(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def leave_NotEqual_whitespace_after(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def visit_NotIn(self, node: "NotIn") -> Optional[bool]: + pass + + @mark_no_op + def visit_NotIn_whitespace_before(self, node: "NotIn") -> None: + pass + + @mark_no_op + def leave_NotIn_whitespace_before(self, node: "NotIn") -> None: + pass + + @mark_no_op + def visit_NotIn_whitespace_between(self, node: "NotIn") -> None: + pass + + @mark_no_op + def leave_NotIn_whitespace_between(self, node: "NotIn") -> None: + pass + + @mark_no_op + def visit_NotIn_whitespace_after(self, node: "NotIn") -> None: + pass + + @mark_no_op + def leave_NotIn_whitespace_after(self, node: "NotIn") -> 
None: + pass + + @mark_no_op + def visit_Or(self, node: "Or") -> Optional[bool]: + pass + + @mark_no_op + def visit_Or_whitespace_before(self, node: "Or") -> None: + pass + + @mark_no_op + def leave_Or_whitespace_before(self, node: "Or") -> None: + pass + + @mark_no_op + def visit_Or_whitespace_after(self, node: "Or") -> None: + pass + + @mark_no_op + def leave_Or_whitespace_after(self, node: "Or") -> None: + pass + + @mark_no_op + def visit_Param(self, node: "Param") -> Optional[bool]: + pass + + @mark_no_op + def visit_Param_name(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_name(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_annotation(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_annotation(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_equal(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_equal(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_default(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_default(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_comma(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_comma(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_star(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_star(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_whitespace_after_star(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_whitespace_after_star(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_whitespace_after_param(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_whitespace_after_param(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_ParamSlash(self, node: "ParamSlash") -> Optional[bool]: + pass + + @mark_no_op + def visit_ParamSlash_comma(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def leave_ParamSlash_comma(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def visit_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def visit_ParamStar(self, node: "ParamStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_ParamStar_comma(self, node: "ParamStar") -> None: + pass + + @mark_no_op + def leave_ParamStar_comma(self, node: "ParamStar") -> None: + pass + + @mark_no_op + def visit_Parameters(self, node: "Parameters") -> Optional[bool]: + pass + + @mark_no_op + def visit_Parameters_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_star_arg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_star_arg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_kwonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_kwonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_star_kwarg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_star_kwarg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_posonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_posonly_params(self, node: "Parameters") -> None: + pass + + 
@mark_no_op + def visit_Parameters_posonly_ind(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_posonly_ind(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace( + self, node: "ParenthesizedWhitespace" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_first_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_first_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_empty_lines( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_empty_lines( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_indent( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_indent( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_last_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_last_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_Pass(self, node: "Pass") -> Optional[bool]: + pass + + @mark_no_op + def visit_Pass_semicolon(self, node: "Pass") -> None: + pass + + @mark_no_op + def leave_Pass_semicolon(self, node: "Pass") -> None: + pass + + @mark_no_op + def visit_Plus(self, node: "Plus") -> Optional[bool]: + pass + + @mark_no_op + def visit_Plus_whitespace_after(self, node: "Plus") -> None: + pass + + @mark_no_op + def leave_Plus_whitespace_after(self, node: "Plus") -> None: + pass + + @mark_no_op + def visit_Power(self, node: "Power") -> Optional[bool]: + pass + + @mark_no_op + def visit_Power_whitespace_before(self, node: "Power") -> None: + pass + + @mark_no_op + def leave_Power_whitespace_before(self, node: "Power") -> None: + pass + + @mark_no_op + def visit_Power_whitespace_after(self, node: "Power") -> None: + pass + + @mark_no_op + def leave_Power_whitespace_after(self, node: "Power") -> None: + pass + + @mark_no_op + def visit_PowerAssign(self, node: "PowerAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def leave_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def visit_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def leave_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def visit_Raise(self, node: "Raise") -> Optional[bool]: + pass + + @mark_no_op + def visit_Raise_exc(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_exc(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Raise_cause(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_cause(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Raise_whitespace_after_raise(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_whitespace_after_raise(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Raise_semicolon(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_semicolon(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Return(self, node: "Return") -> Optional[bool]: + pass + + @mark_no_op + def 
visit_Return_value(self, node: "Return") -> None: + pass + + @mark_no_op + def leave_Return_value(self, node: "Return") -> None: + pass + + @mark_no_op + def visit_Return_whitespace_after_return(self, node: "Return") -> None: + pass + + @mark_no_op + def leave_Return_whitespace_after_return(self, node: "Return") -> None: + pass + + @mark_no_op + def visit_Return_semicolon(self, node: "Return") -> None: + pass + + @mark_no_op + def leave_Return_semicolon(self, node: "Return") -> None: + pass + + @mark_no_op + def visit_RightCurlyBrace(self, node: "RightCurlyBrace") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: + pass + + @mark_no_op + def leave_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: + pass + + @mark_no_op + def visit_RightParen(self, node: "RightParen") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightParen_whitespace_before(self, node: "RightParen") -> None: + pass + + @mark_no_op + def leave_RightParen_whitespace_before(self, node: "RightParen") -> None: + pass + + @mark_no_op + def visit_RightShift(self, node: "RightShift") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightShift_whitespace_before(self, node: "RightShift") -> None: + pass + + @mark_no_op + def leave_RightShift_whitespace_before(self, node: "RightShift") -> None: + pass + + @mark_no_op + def visit_RightShift_whitespace_after(self, node: "RightShift") -> None: + pass + + @mark_no_op + def leave_RightShift_whitespace_after(self, node: "RightShift") -> None: + pass + + @mark_no_op + def visit_RightShiftAssign(self, node: "RightShiftAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightShiftAssign_whitespace_before( + self, node: "RightShiftAssign" + ) -> None: + pass + + @mark_no_op + def leave_RightShiftAssign_whitespace_before( + self, node: "RightShiftAssign" + ) -> None: + pass + + @mark_no_op + def visit_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: + pass + + @mark_no_op + def leave_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: + pass + + @mark_no_op + def visit_RightSquareBracket(self, node: "RightSquareBracket") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightSquareBracket_whitespace_before( + self, node: "RightSquareBracket" + ) -> None: + pass + + @mark_no_op + def leave_RightSquareBracket_whitespace_before( + self, node: "RightSquareBracket" + ) -> None: + pass + + @mark_no_op + def visit_Semicolon(self, node: "Semicolon") -> Optional[bool]: + pass + + @mark_no_op + def visit_Semicolon_whitespace_before(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def leave_Semicolon_whitespace_before(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def visit_Semicolon_whitespace_after(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def leave_Semicolon_whitespace_after(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def visit_Set(self, node: "Set") -> Optional[bool]: + pass + + @mark_no_op + def visit_Set_elements(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_elements(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_lbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_lbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_rbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_rbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_lpar(self, node: "Set") -> 
None: + pass + + @mark_no_op + def leave_Set_lpar(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_rpar(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_rpar(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_SetComp(self, node: "SetComp") -> Optional[bool]: + pass + + @mark_no_op + def visit_SetComp_elt(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_elt(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_for_in(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_for_in(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_lbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_lbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_rbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_rbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_lpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_lpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_rpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_rpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SimpleStatementLine(self, node: "SimpleStatementLine") -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: + pass + + @mark_no_op + def leave_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: + pass + + @mark_no_op + def visit_SimpleStatementLine_leading_lines( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementLine_leading_lines( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def visit_SimpleStatementLine_trailing_whitespace( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementLine_trailing_whitespace( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def visit_SimpleStatementSuite( + self, node: "SimpleStatementSuite" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: + pass + + @mark_no_op + def visit_SimpleStatementSuite_leading_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite_leading_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def visit_SimpleStatementSuite_trailing_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite_trailing_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def visit_SimpleString(self, node: "SimpleString") -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleString_value(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleString_value(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def visit_SimpleString_lpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleString_lpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def visit_SimpleString_rpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleString_rpar(self, node: "SimpleString") -> None: 
+ pass + + @mark_no_op + def visit_SimpleWhitespace(self, node: "SimpleWhitespace") -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: + pass + + @mark_no_op + def leave_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: + pass + + @mark_no_op + def visit_Slice(self, node: "Slice") -> Optional[bool]: + pass + + @mark_no_op + def visit_Slice_lower(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_lower(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_upper(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_upper(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_step(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_step(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_first_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_first_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_second_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_second_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_StarredDictElement(self, node: "StarredDictElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_StarredDictElement_value(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def leave_StarredDictElement_value(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def visit_StarredDictElement_comma(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def leave_StarredDictElement_comma(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def visit_StarredDictElement_whitespace_before_value( + self, node: "StarredDictElement" + ) -> None: + pass + + @mark_no_op + def leave_StarredDictElement_whitespace_before_value( + self, node: "StarredDictElement" + ) -> None: + pass + + @mark_no_op + def visit_StarredElement(self, node: "StarredElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_StarredElement_value(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_value(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_comma(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_comma(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_lpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_lpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_rpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_rpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_whitespace_before_value( + self, node: "StarredElement" + ) -> None: + pass + + @mark_no_op + def leave_StarredElement_whitespace_before_value( + self, node: "StarredElement" + ) -> None: + pass + + @mark_no_op + def visit_Subscript(self, node: "Subscript") -> Optional[bool]: + pass + + @mark_no_op + def visit_Subscript_value(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_value(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_slice(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_slice(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_lbracket(self, node: "Subscript") -> 
None:
+        pass
+
+    @mark_no_op
+    def leave_Subscript_lbracket(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def visit_Subscript_rbracket(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Subscript_rbracket(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def visit_Subscript_lpar(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Subscript_lpar(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def visit_Subscript_rpar(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Subscript_rpar(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def visit_Subscript_whitespace_after_value(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Subscript_whitespace_after_value(self, node: "Subscript") -> None:
+        pass
+
+    @mark_no_op
+    def visit_SubscriptElement(self, node: "SubscriptElement") -> Optional[bool]:
+        pass
+
+    @mark_no_op
+    def visit_Yield_whitespace_after_yield(self, node: "Yield") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Yield_whitespace_after_yield(self, node: "Yield") -> None:
+        pass
+
+
+class CSTTypedVisitorFunctions(CSTTypedBaseFunctions):
+    @mark_no_op
+    def leave_Add(self, original_node: "Add") -> None:
+        pass
+
+    @mark_no_op
+    def leave_AddAssign(self, original_node: "AddAssign") -> None:
+        pass
+
+    @mark_no_op
+    def leave_And(self, original_node: "And") -> None:
+        pass
+
+    @mark_no_op
+    def leave_UnaryOperation(self, original_node: "UnaryOperation") -> None:
+        pass
+
+    @mark_no_op
+    def leave_While(self, original_node: "While") -> None:
+        pass
+
+    @mark_no_op
+    def leave_With(self, original_node: "With") -> None:
+        pass
+
+    @mark_no_op
+    def leave_WithItem(self, original_node: "WithItem") -> None:
+        pass
+
+    @mark_no_op
+    def leave_Yield(self, original_node: "Yield") -> None:
+        pass
+
+
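The visit_*/leave_* hooks above are the generated typed no-op defaults that user-written visitors override. As a minimal sketch of how they are typically consumed (not part of this patch; the `FunctionCounter` name and sample source are illustrative, only the public `libcst` API is assumed):

```
import libcst as cst


class FunctionCounter(cst.CSTVisitor):
    """Count function definitions by overriding one generated hook."""

    def __init__(self) -> None:
        super().__init__()
        self.count = 0

    def visit_FunctionDef(self, node: cst.FunctionDef) -> bool:
        self.count += 1
        return True  # keep traversing into nested functions


module = cst.parse_module("def f():\n    def g():\n        pass\n")
counter = FunctionCounter()
module.visit(counter)
print(counter.count)  # 2
```

Hooks that are not overridden fall back to the @mark_no_op defaults defined above.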
+class CSTTypedTransformerFunctions(CSTTypedBaseFunctions):
+    @mark_no_op
+    def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp":
+        return updated_node
+
+    @mark_no_op
+    def leave_AddAssign(
+        self, original_node: "AddAssign", updated_node: "AddAssign"
+    ) -> "BaseAugOp":
+        return updated_node
+
+    @mark_no_op
+    def leave_And(self, original_node: "And", updated_node: "And") -> "BaseBooleanOp":
+        return updated_node
+
+    @mark_no_op
+    def leave_AnnAssign(
+        self, original_node: "AnnAssign", updated_node: "AnnAssign"
+    ) -> Union[
+        "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel
+    ]:
+        return updated_node
+
+    @mark_no_op
+    def leave_While(
+        self, original_node: "While", updated_node: "While"
+    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
+        return updated_node
+
+    @mark_no_op
+    def leave_With(
+        self, original_node: "With", updated_node: "With"
+    ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]:
+        return updated_node
+
+    @mark_no_op
+    def leave_WithItem(
+        self, original_node: "WithItem", updated_node: "WithItem"
+    ) -> Union["WithItem", FlattenSentinel["WithItem"], RemovalSentinel]:
+        return updated_node
+
+    @mark_no_op
+    def leave_Yield(
+        self, original_node: "Yield", updated_node: "Yield"
+    ) -> "BaseExpression":
+        return updated_node
diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py
index 1fc23557..8323578c 100644
--- a/libcst/matchers/__init__.py
+++ b/libcst/matchers/__init__.py
@@ -1,15927 +1,15927 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-#
-# This source code is licensed under the MIT license found in the
-# LICENSE file in the root directory of this source tree.
-
-
-# This file was generated by libcst.codegen.gen_matcher_classes
-from dataclasses import dataclass
-from typing import Optional, Sequence, Union
-
-from typing_extensions import Literal
-
-import libcst as cst
-from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit
-
-from libcst.matchers._matcher_base import (
-    AbstractBaseMatcherNodeMeta,
-    AllOf,
-    AtLeastN,
-    AtMostN,
-    BaseMatcherNode,
-    DoesNotMatch,
-    DoNotCare,
-    DoNotCareSentinel,
-    extract,
-    extractall,
-    findall,
-    matches,
-    MatchIfTrue,
-    MatchMetadata,
-    MatchMetadataIfTrue,
-    MatchRegex,
-    OneOf,
-    replace,
-    SaveMatchedNode,
-    TypeOf,
-    ZeroOrMore,
-    ZeroOrOne,
-)
-from libcst.matchers._visitors import (
-    MatchDecoratorMismatch,
-    MatcherDecoratableTransformer,
-    MatcherDecoratableVisitor,
-)
-
-
-class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta):
-    __slots__ = ()
-
-
-class BaseAssignTargetExpression(_NodeABC):
-    pass
-
-
-class BaseAugOp(_NodeABC):
-    pass
-
-
-class BaseBinaryOp(_NodeABC):
-    pass
-
-
-class BaseBooleanOp(_NodeABC):
-    pass
-
-
-MetadataMatchType = Union[MatchMetadata, MatchMetadataIfTrue]
-
-
-BaseParenthesizableWhitespaceMatchType = Union[
-    "BaseParenthesizableWhitespace",
-    MetadataMatchType,
-    MatchIfTrue[cst.BaseParenthesizableWhitespace],
-]
-
-
-@dataclass(frozen=True, eq=False, unsafe_hash=False)
-class Add(BaseBinaryOp, BaseMatcherNode):
-    whitespace_before: Union[
-        BaseParenthesizableWhitespaceMatchType,
-        DoNotCareSentinel,
-        OneOf[BaseParenthesizableWhitespaceMatchType],
-        AllOf[BaseParenthesizableWhitespaceMatchType],
-    ] = DoNotCare()
-    whitespace_after: Union[
-        BaseParenthesizableWhitespaceMatchType,
-        DoNotCareSentinel,
-        OneOf[BaseParenthesizableWhitespaceMatchType],
-        AllOf[BaseParenthesizableWhitespaceMatchType],
-    ] = DoNotCare()
-    metadata: Union[
-        MetadataMatchType,
-        DoNotCareSentinel,
-        OneOf[MetadataMatchType],
-        AllOf[MetadataMatchType],
-    ] = DoNotCare()
-
-
-@dataclass(frozen=True, eq=False, unsafe_hash=False)
-class Attribute(
-    BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode
-):
-    value: Union[
-        BaseExpressionMatchType,
-        DoNotCareSentinel,
-        OneOf[BaseExpressionMatchType],
-        AllOf[BaseExpressionMatchType],
-    ] = DoNotCare()
-    attr: Union[
-        NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]
-    ] = DoNotCare()
-    dot: Union[
-        DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType]
-    ] = DoNotCare()
-    lpar: Union[
-                MatchIfTrue[Sequence[cst.RightParen]],
-            ]
-        ],
-        AllOf[
Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseAugOpMatchType = Union["BaseAugOp", MetadataMatchType, MatchIfTrue[cst.BaseAugOp]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AugAssign(BaseSmallStatement, BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - operator: Union[ - BaseAugOpMatchType, - DoNotCareSentinel, - OneOf[BaseAugOpMatchType], - AllOf[BaseAugOpMatchType], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Await(BaseExpression, BaseMatcherNode): - expression: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - 
Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_await: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseBinaryOpMatchType = Union[ - "BaseBinaryOp", MetadataMatchType, MatchIfTrue[cst.BaseBinaryOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BinaryOperation(BaseExpression, BaseMatcherNode): - left: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - operator: Union[ - BaseBinaryOpMatchType, - DoNotCareSentinel, - OneOf[BaseBinaryOpMatchType], - AllOf[BaseBinaryOpMatchType], - ] = DoNotCare() - right: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - 
MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitAnd(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitAndAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitInvert(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitOr(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitOrAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - 
BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitXor(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitXorAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseBooleanOpMatchType = Union[ - "BaseBooleanOp", MetadataMatchType, MatchIfTrue[cst.BaseBooleanOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BooleanOperation(BaseExpression, BaseMatcherNode): - left: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - operator: Union[ - BaseBooleanOpMatchType, - DoNotCareSentinel, - OneOf[BaseBooleanOpMatchType], - AllOf[BaseBooleanOpMatchType], - ] = DoNotCare() - right: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], 
- AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Break(BaseSmallStatement, BaseMatcherNode): - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ArgMatchType = Union["Arg", MetadataMatchType, MatchIfTrue[cst.Arg]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Call(BaseExpression, BaseMatcherNode): - func: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - args: Union[ - Sequence[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - AtMostN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Arg]], - OneOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_func: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_args: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseSuiteMatchType = Union["BaseSuite", MetadataMatchType, MatchIfTrue[cst.BaseSuite]] -DecoratorMatchType = Union["Decorator", MetadataMatchType, MatchIfTrue[cst.Decorator]] -EmptyLineMatchType = Union["EmptyLine", MetadataMatchType, MatchIfTrue[cst.EmptyLine]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] 
- ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - bases: Union[ - Sequence[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - AtMostN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Arg]], - OneOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - ] = DoNotCare() - keywords: Union[ - Sequence[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - AtMostN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Arg]], - OneOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - ] = DoNotCare() - decorators: Union[ - Sequence[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Decorator]], - OneOf[ - Union[ - Sequence[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Decorator]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - 
MatchIfTrue[Sequence[cst.Decorator]], - ] - ], - ] = DoNotCare() - lpar: Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] = DoNotCare() - rpar: Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - lines_after_decorators: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_class: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_name: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], 
- ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Colon(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Comma(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -strMatchType = Union[str, MetadataMatchType, MatchIfTrue[str]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Comment(BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -CompIfMatchType = Union["CompIf", MetadataMatchType, MatchIfTrue[cst.CompIf]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class CompFor(BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - iter: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - ifs: Union[ - Sequence[ - Union[ - CompIfMatchType, - DoNotCareSentinel, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - AtLeastN[ - Union[ - CompIfMatchType, - DoNotCareSentinel, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - AtMostN[ - Union[ - CompIfMatchType, - DoNotCareSentinel, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.CompIf]], - OneOf[ - Union[ - Sequence[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - AtLeastN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - AtMostN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.CompIf]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - AtLeastN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - AtMostN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.CompIf]], - ] - ], - ] = DoNotCare() - inner_for_in: Union[ - Optional["CompFor"], - MetadataMatchType, - MatchIfTrue[Optional[cst.CompFor]], - DoNotCareSentinel, - OneOf[ - Union[ - 
Optional["CompFor"], - MetadataMatchType, - MatchIfTrue[Optional[cst.CompFor]], - ] - ], - AllOf[ - Union[ - Optional["CompFor"], - MetadataMatchType, - MatchIfTrue[Optional[cst.CompFor]], - ] - ], - ] = DoNotCare() - asynchronous: Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - AllOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - ] = DoNotCare() - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_for: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_in: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_in: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class CompIf(BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_test: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ComparisonTargetMatchType = Union[ - "ComparisonTarget", MetadataMatchType, MatchIfTrue[cst.ComparisonTarget] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Comparison(BaseExpression, BaseMatcherNode): - left: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comparisons: Union[ - Sequence[ - Union[ - ComparisonTargetMatchType, - DoNotCareSentinel, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - AtLeastN[ - Union[ - ComparisonTargetMatchType, - DoNotCareSentinel, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - AtMostN[ - Union[ - ComparisonTargetMatchType, - DoNotCareSentinel, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ComparisonTarget]], - OneOf[ - Union[ - Sequence[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - AtLeastN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - 
AllOf[ComparisonTargetMatchType], - ] - ], - AtMostN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ComparisonTarget]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - AtLeastN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - AtMostN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ComparisonTarget]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseCompOpMatchType = Union[ - "BaseCompOp", MetadataMatchType, MatchIfTrue[cst.BaseCompOp] -] - - 
-@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ComparisonTarget(BaseMatcherNode): - operator: Union[ - BaseCompOpMatchType, - DoNotCareSentinel, - OneOf[BaseCompOpMatchType], - AllOf[BaseCompOpMatchType], - ] = DoNotCare() - comparator: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SimpleStringOrFormattedStringMatchType = Union[ - "SimpleString", - "FormattedString", - MetadataMatchType, - MatchIfTrue[Union[cst.SimpleString, cst.FormattedString]], -] -SimpleStringOrFormattedStringOrConcatenatedStringMatchType = Union[ - "SimpleString", - "FormattedString", - "ConcatenatedString", - MetadataMatchType, - MatchIfTrue[Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): - left: Union[ - SimpleStringOrFormattedStringMatchType, - DoNotCareSentinel, - OneOf[SimpleStringOrFormattedStringMatchType], - AllOf[SimpleStringOrFormattedStringMatchType], - ] = DoNotCare() - right: Union[ - SimpleStringOrFormattedStringOrConcatenatedStringMatchType, - DoNotCareSentinel, - OneOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], - AllOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - 
RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_between: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Continue(BaseSmallStatement, BaseMatcherNode): - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -TrailingWhitespaceMatchType = Union[ - "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Decorator(BaseMatcherNode): - decorator: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_at: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - trailing_whitespace: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseDelTargetExpressionMatchType = Union[ - "BaseDelTargetExpression", - MetadataMatchType, - 
MatchIfTrue[cst.BaseDelTargetExpression], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Del(BaseSmallStatement, BaseMatcherNode): - target: Union[ - BaseDelTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseDelTargetExpressionMatchType], - AllOf[BaseDelTargetExpressionMatchType], - ] = DoNotCare() - whitespace_after_del: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseDictElementMatchType = Union[ - "BaseDictElement", MetadataMatchType, MatchIfTrue[cst.BaseDictElement] -] -LeftCurlyBraceMatchType = Union[ - "LeftCurlyBrace", MetadataMatchType, MatchIfTrue[cst.LeftCurlyBrace] -] -RightCurlyBraceMatchType = Union[ - "RightCurlyBrace", MetadataMatchType, MatchIfTrue[cst.RightCurlyBrace] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Dict(BaseDict, BaseExpression, BaseMatcherNode): - elements: Union[ - Sequence[ - Union[ - BaseDictElementMatchType, - DoNotCareSentinel, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - AtLeastN[ - Union[ - BaseDictElementMatchType, - DoNotCareSentinel, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseDictElementMatchType, - DoNotCareSentinel, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseDictElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - AtLeastN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseDictElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - AtLeastN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseDictElement]], - ] - ], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - 
[Patch hunk continues: removed lines covering the remainder of the auto-generated matcher dataclasses, in order: DictComp, DictElement, Divide, DivideAssign, Dot, Element, Ellipsis, Else, EmptyLine, Equal, ExceptHandler, ExceptStarHandler, Expr, Finally, Float, FloorDivide, FloorDivideAssign, For, FormattedString, FormattedStringExpression, FormattedStringText, From, FunctionDef, GeneratorExp, Global, GreaterThan, GreaterThanEqual, If, IfExp, Imaginary, Import, ImportAlias, ImportFrom, ImportStar, In, IndentedBlock, ... Interleaved with the classes are their supporting type aliases (CompForMatchType, boolMatchType, NewlineMatchType, BaseFormattedStringContentMatchType, ParametersMatchType, NameItemMatchType, IfOrElseOrNoneMatchType, ImportAliasMatchType, AttributeOrNameMatchType, AttributeOrNameOrNoneMatchType, BaseStatementMatchType). Each class is declared with @dataclass(frozen=True, eq=False, unsafe_hash=False); each field is annotated as a Union of the field's *MatchType, DoNotCareSentinel, OneOf[...], and AllOf[...], with Sequence-valued fields additionally admitting AtLeastN, AtMostN, and MatchIfTrue, and every field, including the trailing metadata field, defaults to DoNotCare().]
AtMostN[ - Union[ - BaseStatementMatchType, - DoNotCareSentinel, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseStatement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - AtLeastN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseStatement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - AtLeastN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseStatement]], - ] - ], - ] = DoNotCare() - header: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - indent: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - footer: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Index(BaseSlice, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - star: Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - ] - ], - AllOf[ - Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - ] 
- ], - ] = DoNotCare() - whitespace_after_star: Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - ] - ], - AllOf[ - Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Integer(BaseExpression, BaseNumber, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - 
OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Is(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class IsNot(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_between: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ColonMatchType = Union["Colon", MetadataMatchType, MatchIfTrue[cst.Colon]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Lambda(BaseExpression, BaseMatcherNode): - params: Union[ - ParametersMatchType, - DoNotCareSentinel, - OneOf[ParametersMatchType], - AllOf[ParametersMatchType], - ] = DoNotCare() - body: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_lambda: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftCurlyBrace(BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftParen(BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftShift(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftShiftAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - 
AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftSquareBracket(BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LessThan(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LessThanEqual(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseElementMatchType = Union[ - "BaseElement", MetadataMatchType, MatchIfTrue[cst.BaseElement] -] -LeftSquareBracketMatchType = Union[ - "LeftSquareBracket", MetadataMatchType, MatchIfTrue[cst.LeftSquareBracket] -] -RightSquareBracketMatchType = Union[ - "RightSquareBracket", MetadataMatchType, MatchIfTrue[cst.RightSquareBracket] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class List( - BaseAssignTargetExpression, - BaseDelTargetExpression, - BaseExpression, - BaseList, - BaseMatcherNode, -): - elements: Union[ - Sequence[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - 
OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNode): - elt: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - 
OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - for_in: Union[ - CompForMatchType, - DoNotCareSentinel, - OneOf[CompForMatchType], - AllOf[CompForMatchType], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchCaseMatchType = Union["MatchCase", MetadataMatchType, MatchIfTrue[cst.MatchCase]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Match(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - subject: Union[ - BaseExpressionMatchType, - 
DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - cases: Union[ - Sequence[ - Union[ - MatchCaseMatchType, - DoNotCareSentinel, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - AtLeastN[ - Union[ - MatchCaseMatchType, - DoNotCareSentinel, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - AtMostN[ - Union[ - MatchCaseMatchType, - DoNotCareSentinel, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchCase]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - AtLeastN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - AtMostN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchCase]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - AtLeastN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - AtMostN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchCase]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_match: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_colon: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - indent: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - footer: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - 
DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchAs(BaseMatcherNode): - pattern: Union[ - Optional["MatchPattern"], - MetadataMatchType, - MatchIfTrue[Optional[cst.MatchPattern]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["MatchPattern"], - MetadataMatchType, - MatchIfTrue[Optional[cst.MatchPattern]], - ] - ], - AllOf[ - Union[ - Optional["MatchPattern"], - MetadataMatchType, - MatchIfTrue[Optional[cst.MatchPattern]], - ] - ], - ] = DoNotCare() - name: Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - whitespace_before_as: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_as: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - 
AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchPatternMatchType = Union[ - "MatchPattern", MetadataMatchType, MatchIfTrue[cst.MatchPattern] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchCase(BaseMatcherNode): - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - guard: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - 
OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_case: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_if: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_if: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchSequenceElementMatchType = Union[ - "MatchSequenceElement", MetadataMatchType, MatchIfTrue[cst.MatchSequenceElement] -] -MatchKeywordElementMatchType = Union[ - "MatchKeywordElement", MetadataMatchType, MatchIfTrue[cst.MatchKeywordElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchClass(BaseMatcherNode): - cls: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - patterns: Union[ - Sequence[ - Union[ - MatchSequenceElementMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - AtLeastN[ - Union[ - MatchSequenceElementMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchSequenceElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - AtLeastN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchSequenceElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - AtLeastN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchSequenceElement]], - ] - ], - ] = DoNotCare() - kwds: Union[ - Sequence[ - Union[ - MatchKeywordElementMatchType, - DoNotCareSentinel, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - AtLeastN[ - Union[ - MatchKeywordElementMatchType, - DoNotCareSentinel, - 
OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchKeywordElementMatchType, - DoNotCareSentinel, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchKeywordElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - AtLeastN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchKeywordElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - AtLeastN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchKeywordElement]], - ] - ], - ] = DoNotCare() - whitespace_after_cls: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_patterns: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_kwds: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - 
RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchKeywordElement(BaseMatcherNode): - key: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_equal: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_equal: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchSequenceElementOrMatchStarMatchType = Union[ - "MatchSequenceElement", - "MatchStar", - MetadataMatchType, - MatchIfTrue[Union[cst.MatchSequenceElement, cst.MatchStar]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchList(BaseMatcherNode): - patterns: Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - OneOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - 
OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - ] = DoNotCare() - lbracket: Union[ - Optional["LeftSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftSquareBracket]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["LeftSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftSquareBracket]], - ] - ], - AllOf[ - Union[ - Optional["LeftSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftSquareBracket]], - ] - ], - ] = DoNotCare() - rbracket: Union[ - Optional["RightSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightSquareBracket]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["RightSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightSquareBracket]], - ] - ], - AllOf[ - Union[ - Optional["RightSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightSquareBracket]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - 
AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchMappingElementMatchType = Union[ - "MatchMappingElement", MetadataMatchType, MatchIfTrue[cst.MatchMappingElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchMapping(BaseMatcherNode): - elements: Union[ - Sequence[ - Union[ - MatchMappingElementMatchType, - DoNotCareSentinel, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - AtLeastN[ - Union[ - MatchMappingElementMatchType, - DoNotCareSentinel, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchMappingElementMatchType, - DoNotCareSentinel, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchMappingElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - AtLeastN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchMappingElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - AtLeastN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchMappingElement]], - ] - ], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - rest: Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - 
Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - whitespace_before_rest: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - trailing_comma: Union[ - Optional["Comma"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comma]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] - ] - ], - AllOf[ - Union[ - Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchMappingElement(BaseMatcherNode): - key: Union[ - 
BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchOrElementMatchType = Union[ - "MatchOrElement", MetadataMatchType, MatchIfTrue[cst.MatchOrElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchOr(BaseMatcherNode): - patterns: Union[ - Sequence[ - Union[ - MatchOrElementMatchType, - DoNotCareSentinel, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - AtLeastN[ - Union[ - MatchOrElementMatchType, - DoNotCareSentinel, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchOrElementMatchType, - DoNotCareSentinel, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchOrElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - AtLeastN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchOrElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - AtLeastN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchOrElement]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - 
AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BitOrMatchType = Union["BitOr", MetadataMatchType, MatchIfTrue[cst.BitOr]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchOrElement(BaseMatcherNode): - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - separator: Union[ - BitOrMatchType, DoNotCareSentinel, OneOf[BitOrMatchType], AllOf[BitOrMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchPattern(BaseMatcherNode): - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchSequence(BaseMatcherNode): - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchSequenceElement(BaseMatcherNode): - value: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchSingleton(BaseMatcherNode): - value: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - metadata: Union[ - 
MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchStar(BaseMatcherNode): - name: Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_name: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchTuple(BaseMatcherNode): - patterns: Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - OneOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchValue(BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatrixMultiply(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - 
metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatrixMultiplyAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Minus(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SimpleStatementLineOrBaseCompoundStatementMatchType = Union[ - "SimpleStatementLine", - "BaseCompoundStatement", - MetadataMatchType, - MatchIfTrue[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Module(BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - DoNotCareSentinel, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AtLeastN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - DoNotCareSentinel, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - ] - ], - AtMostN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - DoNotCareSentinel, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], - AllOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], - ] - ] - ], - OneOf[ - Union[ - Sequence[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AtLeastN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - AtMostN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - ] - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - 
OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AtLeastN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - AtMostN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - ] - ] - ], - ] - ], - ] = DoNotCare() - header: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - footer: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - encoding: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - default_indent: 
Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - default_newline: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - has_trailing_newline: Union[ - boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Modulo(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ModuloAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Multiply(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MultiplyAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Name( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, 
- DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NameItem(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NamedExpr(BaseExpression, BaseMatcherNode): - target: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ 
- Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_before_walrus: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_walrus: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Newline(BaseMatcherNode): - value: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, 
unsafe_hash=False) -class Nonlocal(BaseSmallStatement, BaseMatcherNode): - names: Union[ - Sequence[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.NameItem]], - OneOf[ - Union[ - Sequence[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.NameItem]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.NameItem]], - ] - ], - ] = DoNotCare() - whitespace_after_nonlocal: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Not(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NotEqual(BaseCompOp, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NotIn(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_between: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - 
BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Or(BaseBooleanOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Param(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - annotation: Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - ] - ], - AllOf[ - Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - ] - ], - ] = DoNotCare() - equal: Union[ - AssignEqualMatchType, - DoNotCareSentinel, - OneOf[AssignEqualMatchType], - AllOf[AssignEqualMatchType], - ] = DoNotCare() - default: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - star: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - whitespace_after_star: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_param: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParamSlash(BaseMatcherNode): - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParamStar(BaseMatcherNode): - comma: Union[ - CommaMatchType, 
DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ParamMatchType = Union["Param", MetadataMatchType, MatchIfTrue[cst.Param]] -ParamOrParamStarMatchType = Union[ - "Param", - "ParamStar", - MetadataMatchType, - MatchIfTrue[Union[cst.Param, cst.ParamStar]], -] -ParamSlashMatchType = Union[ - "ParamSlash", MetadataMatchType, MatchIfTrue[cst.ParamSlash] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Parameters(BaseMatcherNode): - params: Union[ - Sequence[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Param]], - OneOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - ] = DoNotCare() - star_arg: Union[ - ParamOrParamStarMatchType, - DoNotCareSentinel, - OneOf[ParamOrParamStarMatchType], - AllOf[ParamOrParamStarMatchType], - ] = DoNotCare() - kwonly_params: Union[ - Sequence[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Param]], - OneOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - ] = DoNotCare() - star_kwarg: Union[ - Optional["Param"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Param]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] - ] - ], - AllOf[ - Union[ - Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] - ] - ], - ] = DoNotCare() - posonly_params: Union[ - Sequence[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], 
- AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Param]], - OneOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - ] = DoNotCare() - posonly_ind: Union[ - ParamSlashMatchType, - DoNotCareSentinel, - OneOf[ParamSlashMatchType], - AllOf[ParamSlashMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): - first_line: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - empty_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - indent: Union[ - boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] - ] = DoNotCare() - last_line: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Pass(BaseSmallStatement, BaseMatcherNode): - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - 
AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Plus(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Power(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class PowerAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Raise(BaseSmallStatement, BaseMatcherNode): - exc: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - cause: Union[ - Optional["From"], - MetadataMatchType, - MatchIfTrue[Optional[cst.From]], - DoNotCareSentinel, - OneOf[ - Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] - ], - AllOf[ - Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] - ], - ] = DoNotCare() - whitespace_after_raise: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Return(BaseSmallStatement, BaseMatcherNode): - value: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] 
- ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - whitespace_after_return: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightCurlyBrace(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightParen(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightShift(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightShiftAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightSquareBracket(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Semicolon(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: 
Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Set(BaseExpression, BaseSet, BaseMatcherNode): - elements: Union[ - Sequence[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - 
DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode): - elt: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - for_in: Union[ - CompForMatchType, - DoNotCareSentinel, - OneOf[CompForMatchType], - AllOf[CompForMatchType], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseSmallStatementMatchType = Union[ - "BaseSmallStatement", MetadataMatchType, MatchIfTrue[cst.BaseSmallStatement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleStatementLine(BaseStatement, BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - 
OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - trailing_whitespace: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleStatementSuite(BaseSuite, BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - ] = DoNotCare() - leading_whitespace: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - trailing_whitespace: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleString(BaseExpression, BaseString, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - 
AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Slice(BaseSlice, BaseMatcherNode): - lower: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - upper: Union[ - Optional["BaseExpression"], - MetadataMatchType, - 
MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - step: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - first_colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - second_colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class StarredDictElement(BaseDictElement, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_value: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class StarredElement(BaseElement, BaseExpression, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - 
RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_before_value: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SubscriptElementMatchType = Union[ - "SubscriptElement", MetadataMatchType, MatchIfTrue[cst.SubscriptElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Subscript( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - slice: Union[ - Sequence[ - Union[ - SubscriptElementMatchType, - DoNotCareSentinel, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - AtLeastN[ - Union[ - SubscriptElementMatchType, - DoNotCareSentinel, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - AtMostN[ - Union[ - SubscriptElementMatchType, - DoNotCareSentinel, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.SubscriptElement]], - OneOf[ - Union[ - Sequence[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - AtLeastN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - AtMostN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.SubscriptElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - AtLeastN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - AtMostN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.SubscriptElement]], - ] - ], - ] = DoNotCare() - lbracket: 
Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_value: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseSliceMatchType = Union["BaseSlice", MetadataMatchType, MatchIfTrue[cst.BaseSlice]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SubscriptElement(BaseMatcherNode): - slice: Union[ - BaseSliceMatchType, - DoNotCareSentinel, - OneOf[BaseSliceMatchType], - 
AllOf[BaseSliceMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Subtract(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SubtractAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TrailingWhitespace(BaseMatcherNode): - whitespace: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - comment: Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - ] - ], - AllOf[ - Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - ] - ], - ] = DoNotCare() - newline: Union[ - NewlineMatchType, - DoNotCareSentinel, - OneOf[NewlineMatchType], - AllOf[NewlineMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ExceptHandlerMatchType = Union[ - "ExceptHandler", MetadataMatchType, MatchIfTrue[cst.ExceptHandler] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - handlers: Union[ - Sequence[ - Union[ - ExceptHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - AtLeastN[ - Union[ - ExceptHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ExceptHandler]], - OneOf[ - Union[ - Sequence[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - AtLeastN[ - Union[ - ExceptHandlerMatchType, - 
OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptHandler]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - AtLeastN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptHandler]], - ] - ], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - finalbody: Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - AllOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ExceptStarHandlerMatchType = Union[ - "ExceptStarHandler", MetadataMatchType, MatchIfTrue[cst.ExceptStarHandler] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TryStar(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - handlers: Union[ - Sequence[ - Union[ - ExceptStarHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - AtLeastN[ - Union[ - 
ExceptStarHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptStarHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ExceptStarHandler]], - OneOf[ - Union[ - Sequence[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - AtLeastN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptStarHandler]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - AtLeastN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptStarHandler]], - ] - ], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - finalbody: Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - AllOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - 
OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Tuple( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - elements: Union[ - Sequence[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseUnaryOpMatchType = Union[ - "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class UnaryOperation(BaseExpression, BaseMatcherNode): - operator: Union[ - BaseUnaryOpMatchType, - DoNotCareSentinel, - OneOf[BaseUnaryOpMatchType], - AllOf[BaseUnaryOpMatchType], - ] = DoNotCare() - expression: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_while: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -WithItemMatchType = Union["WithItem", MetadataMatchType, MatchIfTrue[cst.WithItem]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - items: Union[ - Sequence[ - Union[ - WithItemMatchType, - DoNotCareSentinel, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - AtLeastN[ - Union[ - WithItemMatchType, - DoNotCareSentinel, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - AtMostN[ - Union[ - WithItemMatchType, - DoNotCareSentinel, - OneOf[WithItemMatchType], - 
AllOf[WithItemMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.WithItem]], - OneOf[ - Union[ - Sequence[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - AtLeastN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - AtMostN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.WithItem]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - AtLeastN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - AtMostN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.WithItem]], - ] - ], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - asynchronous: Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - AllOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - lpar: Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] = DoNotCare() - rpar: Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] = DoNotCare() - whitespace_after_with: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class WithItem(BaseMatcherNode): - 
item: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - asname: Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AsName]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - AllOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseExpressionOrFromOrNoneMatchType = Union[ - "BaseExpression", - "From", - None, - MetadataMatchType, - MatchIfTrue[Union[cst.BaseExpression, cst.From, None]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Yield(BaseExpression, BaseMatcherNode): - value: Union[ - BaseExpressionOrFromOrNoneMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionOrFromOrNoneMatchType], - AllOf[BaseExpressionOrFromOrNoneMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - 
OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_yield: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -__all__ = [ - "Add", - "AddAssign", - "AllOf", - "And", - "AnnAssign", - "Annotation", - "Arg", - "AsName", - "Assert", - "Assign", - "AssignEqual", - "AssignTarget", - "Asynchronous", - "AtLeastN", - "AtMostN", - "Attribute", - "AugAssign", - "Await", - "BaseAssignTargetExpression", - "BaseAugOp", - "BaseBinaryOp", - "BaseBooleanOp", - "BaseComp", - "BaseCompOp", - "BaseCompoundStatement", - "BaseDelTargetExpression", - "BaseDict", - "BaseDictElement", - "BaseElement", - "BaseExpression", - "BaseFormattedStringContent", - "BaseList", - "BaseMatcherNode", - "BaseMetadataProvider", - "BaseNumber", - "BaseParenthesizableWhitespace", - "BaseSet", - "BaseSimpleComp", - "BaseSlice", - "BaseSmallStatement", - "BaseStatement", - "BaseString", - "BaseSuite", - "BaseUnaryOp", - "BinaryOperation", - "BitAnd", - "BitAndAssign", - "BitInvert", - "BitOr", - "BitOrAssign", - "BitXor", - "BitXorAssign", - "BooleanOperation", - "Break", - "Call", - "ClassDef", - "Colon", - "Comma", - "Comment", - "CompFor", - "CompIf", - "Comparison", - "ComparisonTarget", - "ConcatenatedString", - "Continue", - "Decorator", - "Del", - "Dict", - "DictComp", - "DictElement", - "Divide", - "DivideAssign", - "DoNotCare", - "DoNotCareSentinel", - "DoesNotMatch", - "Dot", - "Element", - "Ellipsis", - "Else", - "EmptyLine", - "Equal", - "ExceptHandler", - "ExceptStarHandler", - "Expr", - "Finally", - "Float", - "FloorDivide", - "FloorDivideAssign", - "For", - "FormattedString", - "FormattedStringExpression", - "FormattedStringText", - "From", - "FunctionDef", - "GeneratorExp", - "Global", - "GreaterThan", - "GreaterThanEqual", - "If", - "IfExp", - "Imaginary", - "Import", - "ImportAlias", - "ImportFrom", - "ImportStar", - "In", - "IndentedBlock", - "Index", - "Integer", - "Is", - "IsNot", - "Lambda", - "LeftCurlyBrace", - "LeftParen", - "LeftShift", - "LeftShiftAssign", - "LeftSquareBracket", - "LessThan", - "LessThanEqual", - "List", - "ListComp", - "Match", - "MatchAs", - "MatchCase", - "MatchClass", - "MatchDecoratorMismatch", - "MatchIfTrue", - "MatchKeywordElement", - "MatchList", - "MatchMapping", - "MatchMappingElement", - "MatchMetadata", - "MatchMetadataIfTrue", - "MatchOr", - "MatchOrElement", - "MatchPattern", - "MatchRegex", - "MatchSequence", - "MatchSequenceElement", - "MatchSingleton", - "MatchStar", - "MatchTuple", - "MatchValue", - "MatcherDecoratableTransformer", - "MatcherDecoratableVisitor", - "MatrixMultiply", - "MatrixMultiplyAssign", - "Minus", - "Module", - "Modulo", - "ModuloAssign", - "Multiply", - "MultiplyAssign", - "Name", - "NameItem", - "NamedExpr", - "Newline", - "Nonlocal", - "Not", - "NotEqual", - "NotIn", - "OneOf", - "Or", - "Param", - "ParamSlash", - "ParamStar", - "Parameters", - "ParenthesizedWhitespace", - "Pass", - "Plus", - "Power", - "PowerAssign", - "Raise", - "Return", - "RightCurlyBrace", - "RightParen", - "RightShift", - "RightShiftAssign", - "RightSquareBracket", - "SaveMatchedNode", 
- "Semicolon", - "Set", - "SetComp", - "SimpleStatementLine", - "SimpleStatementSuite", - "SimpleString", - "SimpleWhitespace", - "Slice", - "StarredDictElement", - "StarredElement", - "Subscript", - "SubscriptElement", - "Subtract", - "SubtractAssign", - "TrailingWhitespace", - "Try", - "TryStar", - "Tuple", - "TypeOf", - "UnaryOperation", - "While", - "With", - "WithItem", - "Yield", - "ZeroOrMore", - "ZeroOrOne", - "call_if_inside", - "call_if_not_inside", - "extract", - "extractall", - "findall", - "leave", - "matches", - "replace", - "visit", -] +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + + +# This file was generated by libcst.codegen.gen_matcher_classes +from dataclasses import dataclass +from typing import Optional, Sequence, Union + +from typing_extensions import Literal + +import libcst as cst +from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit + +from libcst.matchers._matcher_base import ( + AbstractBaseMatcherNodeMeta, + AllOf, + AtLeastN, + AtMostN, + BaseMatcherNode, + DoesNotMatch, + DoNotCare, + DoNotCareSentinel, + extract, + extractall, + findall, + matches, + MatchIfTrue, + MatchMetadata, + MatchMetadataIfTrue, + MatchRegex, + OneOf, + replace, + SaveMatchedNode, + TypeOf, + ZeroOrMore, + ZeroOrOne, +) +from libcst.matchers._visitors import ( + MatchDecoratorMismatch, + MatcherDecoratableTransformer, + MatcherDecoratableVisitor, +) + + +class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta): + __slots__ = () + + +class BaseAssignTargetExpression(_NodeABC): + pass + + +class BaseAugOp(_NodeABC): + pass + + +class BaseBinaryOp(_NodeABC): + pass + + +class BaseBooleanOp(_NodeABC): + pass + + +class BaseComp(_NodeABC): + pass + + +class BaseCompOp(_NodeABC): + pass + + +class BaseCompoundStatement(_NodeABC): + pass + + +class BaseDelTargetExpression(_NodeABC): + pass + + +class BaseDict(_NodeABC): + pass + + +class BaseDictElement(_NodeABC): + pass + + +class BaseElement(_NodeABC): + pass + + +class BaseExpression(_NodeABC): + pass + + +class BaseFormattedStringContent(_NodeABC): + pass + + +class BaseList(_NodeABC): + pass + + +class BaseMetadataProvider(_NodeABC): + pass + + +class BaseNumber(_NodeABC): + pass + + +class BaseParenthesizableWhitespace(_NodeABC): + pass + + +class BaseSet(_NodeABC): + pass + + +class BaseSimpleComp(_NodeABC): + pass + + +class BaseSlice(_NodeABC): + pass + + +class BaseSmallStatement(_NodeABC): + pass + + +class BaseStatement(_NodeABC): + pass + + +class BaseString(_NodeABC): + pass + + +class BaseSuite(_NodeABC): + pass + + +class BaseUnaryOp(_NodeABC): + pass + + +MetadataMatchType = Union[MatchMetadata, MatchMetadataIfTrue] + + +BaseParenthesizableWhitespaceMatchType = Union[ + "BaseParenthesizableWhitespace", + MetadataMatchType, + MatchIfTrue[cst.BaseParenthesizableWhitespace], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Add(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + 
OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AddAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class And(BaseBooleanOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseAssignTargetExpressionMatchType = Union[ + "BaseAssignTargetExpression", + MetadataMatchType, + MatchIfTrue[cst.BaseAssignTargetExpression], +] +AnnotationMatchType = Union[ + "Annotation", MetadataMatchType, MatchIfTrue[cst.Annotation] +] +AssignEqualMatchType = Union[ + "AssignEqual", MetadataMatchType, MatchIfTrue[cst.AssignEqual] +] +SemicolonMatchType = Union["Semicolon", MetadataMatchType, MatchIfTrue[cst.Semicolon]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AnnAssign(BaseSmallStatement, BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + annotation: Union[ + AnnotationMatchType, + DoNotCareSentinel, + OneOf[AnnotationMatchType], + AllOf[AnnotationMatchType], + ] = DoNotCare() + value: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseExpressionMatchType = Union[ + "BaseExpression", MetadataMatchType, MatchIfTrue[cst.BaseExpression] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Annotation(BaseMatcherNode): + annotation: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + whitespace_before_indicator: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + 
OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_indicator: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +CommaMatchType = Union["Comma", MetadataMatchType, MatchIfTrue[cst.Comma]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Arg(BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + keyword: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + star: Union[ + Literal["", "*", "**"], + MetadataMatchType, + MatchIfTrue[Literal["", "*", "**"]], + DoNotCareSentinel, + OneOf[ + Union[ + Literal["", "*", "**"], + MetadataMatchType, + MatchIfTrue[Literal["", "*", "**"]], + ] + ], + AllOf[ + Union[ + Literal["", "*", "**"], + MetadataMatchType, + MatchIfTrue[Literal["", "*", "**"]], + ] + ], + ] = DoNotCare() + whitespace_after_star: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_arg: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +NameOrTupleOrListMatchType = Union[ + "Name", + "Tuple", + "List", + MetadataMatchType, + MatchIfTrue[Union[cst.Name, cst.Tuple, cst.List]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AsName(BaseMatcherNode): + name: Union[ + NameOrTupleOrListMatchType, + DoNotCareSentinel, + OneOf[NameOrTupleOrListMatchType], + AllOf[NameOrTupleOrListMatchType], + ] = DoNotCare() + whitespace_before_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SimpleWhitespaceMatchType = Union[ + "SimpleWhitespace", MetadataMatchType, MatchIfTrue[cst.SimpleWhitespace] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Assert(BaseSmallStatement, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], 
+ AllOf[BaseExpressionMatchType], + ] = DoNotCare() + msg: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_after_assert: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +AssignTargetMatchType = Union[ + "AssignTarget", MetadataMatchType, MatchIfTrue[cst.AssignTarget] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Assign(BaseSmallStatement, BaseMatcherNode): + targets: Union[ + Sequence[ + Union[ + AssignTargetMatchType, + DoNotCareSentinel, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + AtLeastN[ + Union[ + AssignTargetMatchType, + DoNotCareSentinel, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + AtMostN[ + Union[ + AssignTargetMatchType, + DoNotCareSentinel, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.AssignTarget]], + OneOf[ + Union[ + Sequence[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + AtLeastN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + AtMostN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.AssignTarget]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + AtLeastN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + AtMostN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.AssignTarget]], + ] + ], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AssignEqual(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + 
MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AssignTarget(BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + whitespace_before_equal: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equal: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Asynchronous(BaseMatcherNode): + whitespace_after: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +NameMatchType = Union["Name", MetadataMatchType, MatchIfTrue[cst.Name]] +DotMatchType = Union["Dot", MetadataMatchType, MatchIfTrue[cst.Dot]] +LeftParenMatchType = Union["LeftParen", MetadataMatchType, MatchIfTrue[cst.LeftParen]] +RightParenMatchType = Union[ + "RightParen", MetadataMatchType, MatchIfTrue[cst.RightParen] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Attribute( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + attr: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + dot: Union[ + DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + 
AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseAugOpMatchType = Union["BaseAugOp", MetadataMatchType, MatchIfTrue[cst.BaseAugOp]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AugAssign(BaseSmallStatement, BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + operator: Union[ + BaseAugOpMatchType, + DoNotCareSentinel, + OneOf[BaseAugOpMatchType], + AllOf[BaseAugOpMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Await(BaseExpression, BaseMatcherNode): + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + 
LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_await: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseBinaryOpMatchType = Union[ + "BaseBinaryOp", MetadataMatchType, MatchIfTrue[cst.BaseBinaryOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BinaryOperation(BaseExpression, BaseMatcherNode): + left: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + operator: Union[ + BaseBinaryOpMatchType, + DoNotCareSentinel, + OneOf[BaseBinaryOpMatchType], + AllOf[BaseBinaryOpMatchType], + ] = DoNotCare() + right: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitAnd(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitAndAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitInvert(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, 
eq=False, unsafe_hash=False) +class BitOr(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitOrAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitXor(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitXorAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseBooleanOpMatchType = Union[ + "BaseBooleanOp", MetadataMatchType, MatchIfTrue[cst.BaseBooleanOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BooleanOperation(BaseExpression, BaseMatcherNode): + left: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + operator: Union[ + BaseBooleanOpMatchType, + DoNotCareSentinel, + OneOf[BaseBooleanOpMatchType], + AllOf[BaseBooleanOpMatchType], + ] = DoNotCare() + right: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + 
LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Break(BaseSmallStatement, BaseMatcherNode): + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ArgMatchType = Union["Arg", MetadataMatchType, MatchIfTrue[cst.Arg]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Call(BaseExpression, BaseMatcherNode): + func: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + args: Union[ + Sequence[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + AtMostN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Arg]], + 
OneOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_func: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_args: Union[ + BaseParenthesizableWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseSuiteMatchType = Union["BaseSuite", MetadataMatchType, MatchIfTrue[cst.BaseSuite]] +DecoratorMatchType = Union["Decorator", MetadataMatchType, MatchIfTrue[cst.Decorator]] +EmptyLineMatchType = Union["EmptyLine", MetadataMatchType, MatchIfTrue[cst.EmptyLine]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + bases: Union[ + Sequence[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + AtMostN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Arg]], + OneOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + ] = DoNotCare() + keywords: Union[ + Sequence[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + AtMostN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Arg]], + OneOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + ] = DoNotCare() + decorators: Union[ + Sequence[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Decorator]], + 
OneOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + ] = DoNotCare() + lpar: Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] = DoNotCare() + rpar: Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + lines_after_decorators: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + 
AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_class: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_name: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Colon(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Comma(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +strMatchType = Union[str, MetadataMatchType, MatchIfTrue[str]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Comment(BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +CompIfMatchType = Union["CompIf", MetadataMatchType, MatchIfTrue[cst.CompIf]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class CompFor(BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + iter: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + ifs: Union[ + Sequence[ + Union[ + CompIfMatchType, + DoNotCareSentinel, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + AtLeastN[ + Union[ + CompIfMatchType, + DoNotCareSentinel, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + AtMostN[ + Union[ + CompIfMatchType, + DoNotCareSentinel, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.CompIf]], + OneOf[ + Union[ + Sequence[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + AtLeastN[ + Union[ + CompIfMatchType, + 
OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + AtMostN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.CompIf]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + AtLeastN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + AtMostN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.CompIf]], + ] + ], + ] = DoNotCare() + inner_for_in: Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Optional[cst.CompFor]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Optional[cst.CompFor]], + ] + ], + AllOf[ + Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Optional[cst.CompFor]], + ] + ], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_for: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_in: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_in: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class CompIf(BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_test: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ComparisonTargetMatchType = Union[ + "ComparisonTarget", MetadataMatchType, MatchIfTrue[cst.ComparisonTarget] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Comparison(BaseExpression, BaseMatcherNode): + left: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comparisons: Union[ + Sequence[ + Union[ + 
ComparisonTargetMatchType, + DoNotCareSentinel, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + AtLeastN[ + Union[ + ComparisonTargetMatchType, + DoNotCareSentinel, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + AtMostN[ + Union[ + ComparisonTargetMatchType, + DoNotCareSentinel, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ComparisonTarget]], + OneOf[ + Union[ + Sequence[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + AtLeastN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + AtMostN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ComparisonTarget]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + AtLeastN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + AtMostN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ComparisonTarget]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + 
] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseCompOpMatchType = Union[ + "BaseCompOp", MetadataMatchType, MatchIfTrue[cst.BaseCompOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ComparisonTarget(BaseMatcherNode): + operator: Union[ + BaseCompOpMatchType, + DoNotCareSentinel, + OneOf[BaseCompOpMatchType], + AllOf[BaseCompOpMatchType], + ] = DoNotCare() + comparator: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SimpleStringOrFormattedStringMatchType = Union[ + "SimpleString", + "FormattedString", + MetadataMatchType, + MatchIfTrue[Union[cst.SimpleString, cst.FormattedString]], +] +SimpleStringOrFormattedStringOrConcatenatedStringMatchType = Union[ + "SimpleString", + "FormattedString", + "ConcatenatedString", + MetadataMatchType, + MatchIfTrue[Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): + left: Union[ + SimpleStringOrFormattedStringMatchType, + DoNotCareSentinel, + OneOf[SimpleStringOrFormattedStringMatchType], + AllOf[SimpleStringOrFormattedStringMatchType], + ] = DoNotCare() + right: Union[ + SimpleStringOrFormattedStringOrConcatenatedStringMatchType, + DoNotCareSentinel, + OneOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], + AllOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + 
Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_between: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Continue(BaseSmallStatement, BaseMatcherNode): + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +TrailingWhitespaceMatchType = Union[ + "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Decorator(BaseMatcherNode): + decorator: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + 
AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_at: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_whitespace: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseDelTargetExpressionMatchType = Union[ + "BaseDelTargetExpression", + MetadataMatchType, + MatchIfTrue[cst.BaseDelTargetExpression], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Del(BaseSmallStatement, BaseMatcherNode): + target: Union[ + BaseDelTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseDelTargetExpressionMatchType], + AllOf[BaseDelTargetExpressionMatchType], + ] = DoNotCare() + whitespace_after_del: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseDictElementMatchType = Union[ + "BaseDictElement", MetadataMatchType, MatchIfTrue[cst.BaseDictElement] +] +LeftCurlyBraceMatchType = Union[ + "LeftCurlyBrace", MetadataMatchType, MatchIfTrue[cst.LeftCurlyBrace] +] +RightCurlyBraceMatchType = Union[ + "RightCurlyBrace", MetadataMatchType, MatchIfTrue[cst.RightCurlyBrace] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Dict(BaseDict, BaseExpression, BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + BaseDictElementMatchType, + DoNotCareSentinel, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + AtLeastN[ + Union[ + BaseDictElementMatchType, + DoNotCareSentinel, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseDictElementMatchType, + DoNotCareSentinel, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseDictElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + AtLeastN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseDictElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + AtLeastN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseDictElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + 
RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +CompForMatchType = Union["CompFor", MetadataMatchType, MatchIfTrue[cst.CompFor]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): + key: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + 
DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class DictElement(BaseDictElement, BaseMatcherNode): + key: Union[ + 
BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Divide(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class DivideAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Dot(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Element(BaseElement, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Ellipsis(BaseExpression, BaseMatcherNode): + lpar: Union[ + Sequence[ + Union[ + 
LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Else(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + 
OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[bool]] +NewlineMatchType = Union["Newline", MetadataMatchType, MatchIfTrue[cst.Newline]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class EmptyLine(BaseMatcherNode): + indent: Union[ + boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] + ] = DoNotCare() + whitespace: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + comment: Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + AllOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + ] = DoNotCare() + newline: Union[ + NewlineMatchType, + DoNotCareSentinel, + OneOf[NewlineMatchType], + AllOf[NewlineMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Equal(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ExceptHandler(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + type: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + name: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, 
MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_except: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ExceptStarHandler(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + type: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + name: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + 
Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_except: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Expr(BaseSmallStatement, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Finally(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Float(BaseExpression, BaseNumber, BaseMatcherNode): + value: Union[ + strMatchType, 
DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FloorDivide(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FloorDivideAssign(BaseAugOp, 
BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + iter: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_for: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_in: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_in: Union[ + 
SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseFormattedStringContentMatchType = Union[ + "BaseFormattedStringContent", + MetadataMatchType, + MatchIfTrue[cst.BaseFormattedStringContent], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FormattedString(BaseExpression, BaseString, BaseMatcherNode): + parts: Union[ + Sequence[ + Union[ + BaseFormattedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + AtLeastN[ + Union[ + BaseFormattedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseFormattedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + AtLeastN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + AtLeastN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], + ] + ], + ] = DoNotCare() + start: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + end: Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + DoNotCareSentinel, + OneOf[ + Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + ] + ], + AllOf[ + Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + 
LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + conversion: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + format_spec: Union[ + Optional[Sequence["BaseFormattedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional[Sequence["BaseFormattedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], + ] + ], + AllOf[ + Union[ + Optional[Sequence["BaseFormattedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], + ] + ], + ] = DoNotCare() + whitespace_before_expression: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + 
whitespace_after_expression: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + equal: Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + ] + ], + AllOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FormattedStringText(BaseFormattedStringContent, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class From(BaseMatcherNode): + item: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + whitespace_before_from: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_from: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ParametersMatchType = Union[ + "Parameters", MetadataMatchType, MatchIfTrue[cst.Parameters] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + params: Union[ + ParametersMatchType, + DoNotCareSentinel, + OneOf[ParametersMatchType], + AllOf[ParametersMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + decorators: Union[ + Sequence[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Decorator]], + OneOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + 
OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + ] = DoNotCare() + returns: Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + AllOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + lines_after_decorators: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + 
MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_def: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_name: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_params: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): + elt: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + 
RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +NameItemMatchType = Union["NameItem", MetadataMatchType, MatchIfTrue[cst.NameItem]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Global(BaseSmallStatement, BaseMatcherNode): + names: Union[ + Sequence[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.NameItem]], + OneOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + ] = DoNotCare() + whitespace_after_global: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class GreaterThan(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class GreaterThanEqual(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + 
AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +IfOrElseOrNoneMatchType = Union[ + "If", "Else", None, MetadataMatchType, MatchIfTrue[Union[cst.If, cst.Else, None]] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + orelse: Union[ + IfOrElseOrNoneMatchType, + DoNotCareSentinel, + OneOf[IfOrElseOrNoneMatchType], + AllOf[IfOrElseOrNoneMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_test: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_test: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class IfExp(BaseExpression, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + orelse: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_if: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_if: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_else: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_else: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ImportAliasMatchType = Union[ + "ImportAlias", MetadataMatchType, MatchIfTrue[cst.ImportAlias] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Import(BaseSmallStatement, BaseMatcherNode): + names: Union[ + Sequence[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + 
AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + whitespace_after_import: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +AttributeOrNameMatchType = Union[ + "Attribute", "Name", MetadataMatchType, MatchIfTrue[Union[cst.Attribute, cst.Name]] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ImportAlias(BaseMatcherNode): + name: Union[ + AttributeOrNameMatchType, + DoNotCareSentinel, + OneOf[AttributeOrNameMatchType], + AllOf[AttributeOrNameMatchType], + ] = DoNotCare() + asname: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +AttributeOrNameOrNoneMatchType = Union[ + "Attribute", + "Name", + None, + MetadataMatchType, + MatchIfTrue[Union[cst.Attribute, cst.Name, None]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ImportFrom(BaseSmallStatement, BaseMatcherNode): + module: Union[ + AttributeOrNameOrNoneMatchType, + DoNotCareSentinel, + OneOf[AttributeOrNameOrNoneMatchType], + AllOf[AttributeOrNameOrNoneMatchType], + ] = DoNotCare() + names: Union[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + 
Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ], + "ImportStar", + MetadataMatchType, + MatchIfTrue[ + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + ] + ], + DoNotCareSentinel, + OneOf[ + Union[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ], + "ImportStar", + MetadataMatchType, + MatchIfTrue[ + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + ] + ], + ] + ], + AllOf[ + Union[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ], + "ImportStar", + MetadataMatchType, + MatchIfTrue[ + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + ] + ], + ] + ], + ] = DoNotCare() + 
relative: Union[ + Sequence[ + Union[ + DotMatchType, + DoNotCareSentinel, + OneOf[DotMatchType], + AllOf[DotMatchType], + AtLeastN[ + Union[ + DotMatchType, + DoNotCareSentinel, + OneOf[DotMatchType], + AllOf[DotMatchType], + ] + ], + AtMostN[ + Union[ + DotMatchType, + DoNotCareSentinel, + OneOf[DotMatchType], + AllOf[DotMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Dot]], + OneOf[ + Union[ + Sequence[ + Union[ + DotMatchType, + OneOf[DotMatchType], + AllOf[DotMatchType], + AtLeastN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + AtMostN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Dot]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + DotMatchType, + OneOf[DotMatchType], + AllOf[DotMatchType], + AtLeastN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + AtMostN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Dot]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftParen]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightParen]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightParen]], + ] + ], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + whitespace_after_from: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_import: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_import: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ImportStar(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class In(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseStatementMatchType = Union[ + "BaseStatement", 
MetadataMatchType, MatchIfTrue[cst.BaseStatement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class IndentedBlock(BaseSuite, BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + BaseStatementMatchType, + DoNotCareSentinel, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + AtLeastN[ + Union[ + BaseStatementMatchType, + DoNotCareSentinel, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseStatementMatchType, + DoNotCareSentinel, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseStatement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + AtLeastN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseStatement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + AtLeastN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseStatement]], + ] + ], + ] = DoNotCare() + header: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + indent: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Index(BaseSlice, BaseMatcherNode): + value: Union[ + 
BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + star: Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + ] + ], + AllOf[ + Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + ] + ], + ] = DoNotCare() + whitespace_after_star: Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + ] + ], + AllOf[ + Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Integer(BaseExpression, BaseNumber, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + 
Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Is(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class IsNot(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_between: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ColonMatchType = Union["Colon", MetadataMatchType, MatchIfTrue[cst.Colon]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Lambda(BaseExpression, BaseMatcherNode): + params: Union[ + ParametersMatchType, + DoNotCareSentinel, + OneOf[ParametersMatchType], + AllOf[ParametersMatchType], + ] = DoNotCare() + body: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + 
LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_lambda: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftCurlyBrace(BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftParen(BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftShift(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = 
DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftShiftAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftSquareBracket(BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LessThan(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LessThanEqual(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseElementMatchType = Union[ + "BaseElement", MetadataMatchType, MatchIfTrue[cst.BaseElement] +] +LeftSquareBracketMatchType = Union[ + "LeftSquareBracket", MetadataMatchType, MatchIfTrue[cst.LeftSquareBracket] +] +RightSquareBracketMatchType = Union[ + "RightSquareBracket", MetadataMatchType, MatchIfTrue[cst.RightSquareBracket] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class List( + BaseAssignTargetExpression, + BaseDelTargetExpression, + BaseExpression, + BaseList, + BaseMatcherNode, +): + elements: Union[ + Sequence[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + 
AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + 
AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNode): + elt: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + 
MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchCaseMatchType = Union["MatchCase", MetadataMatchType, MatchIfTrue[cst.MatchCase]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Match(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + subject: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + cases: Union[ + Sequence[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchCase]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchCase]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchCase]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_match: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + TrailingWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + indent: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchAs(BaseMatcherNode): + pattern: Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + ] + ], + AllOf[ + Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + ] + ], + ] = DoNotCare() + name: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + whitespace_before_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchPatternMatchType = Union[ + "MatchPattern", MetadataMatchType, MatchIfTrue[cst.MatchPattern] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchCase(BaseMatcherNode): + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + guard: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + 
Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_case: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_if: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_if: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchSequenceElementMatchType = Union[ + "MatchSequenceElement", MetadataMatchType, MatchIfTrue[cst.MatchSequenceElement] +] +MatchKeywordElementMatchType = Union[ + "MatchKeywordElement", MetadataMatchType, MatchIfTrue[cst.MatchKeywordElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchClass(BaseMatcherNode): + cls: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + 
OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + ] + ], + ] = DoNotCare() + kwds: Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + ] + ], + ] = DoNotCare() + whitespace_after_cls: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_patterns: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_kwds: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchKeywordElement(BaseMatcherNode): + key: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_equal: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equal: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchSequenceElementOrMatchStarMatchType = Union[ + "MatchSequenceElement", + "MatchStar", + MetadataMatchType, + MatchIfTrue[Union[cst.MatchSequenceElement, cst.MatchStar]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchList(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + 
cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + ] = DoNotCare() + lbracket: Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + ] + ], + AllOf[ + Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + ] + ], + ] = DoNotCare() + rbracket: Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + ] + ], + AllOf[ + Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + 
Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchMappingElementMatchType = Union[ + "MatchMappingElement", MetadataMatchType, MatchIfTrue[cst.MatchMappingElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchMapping(BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchMappingElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchMappingElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchMappingElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + 
LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + rest: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + whitespace_before_rest: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_comma: Union[ + Optional["Comma"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comma]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] + ] + ], + AllOf[ + Union[ + Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, 
+ OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchMappingElement(BaseMatcherNode): + key: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchOrElementMatchType = Union[ + "MatchOrElement", MetadataMatchType, MatchIfTrue[cst.MatchOrElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchOr(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchOrElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchOrElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchOrElement]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BitOrMatchType = Union["BitOr", MetadataMatchType, MatchIfTrue[cst.BitOr]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchOrElement(BaseMatcherNode): + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + separator: Union[ + BitOrMatchType, DoNotCareSentinel, OneOf[BitOrMatchType], AllOf[BitOrMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchPattern(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSequence(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSequenceElement(BaseMatcherNode): + value: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = 
DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSingleton(BaseMatcherNode): + value: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchStar(BaseMatcherNode): + name: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_name: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchTuple(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + 
AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchValue(BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatrixMultiply(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + 
BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatrixMultiplyAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Minus(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SimpleStatementLineOrBaseCompoundStatementMatchType = Union[ + "SimpleStatementLine", + "BaseCompoundStatement", + MetadataMatchType, + MatchIfTrue[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Module(BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + DoNotCareSentinel, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AtLeastN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + DoNotCareSentinel, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + ] + ], + AtMostN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + DoNotCareSentinel, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], + AllOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AtLeastN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + AtMostN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], 
+ ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + AllOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AtLeastN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + AtMostN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + AllOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + ] + ] + ], + ] + ], + ] = DoNotCare() + header: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + 
OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + encoding: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + default_indent: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + default_newline: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + has_trailing_newline: Union[ + boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Modulo(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ModuloAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Multiply(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MultiplyAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Name( 
+ BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NameItem(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NamedExpr(BaseExpression, BaseMatcherNode): + target: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + 
OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_walrus: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_walrus: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Newline(BaseMatcherNode): + value: Union[ + Optional[str], + MetadataMatchType, + 
MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Nonlocal(BaseSmallStatement, BaseMatcherNode): + names: Union[ + Sequence[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.NameItem]], + OneOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + ] = DoNotCare() + whitespace_after_nonlocal: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Not(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NotEqual(BaseCompOp, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NotIn(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_between: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Or(BaseBooleanOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Param(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + annotation: Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + AllOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + default: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + star: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + whitespace_after_star: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_param: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParamSlash(BaseMatcherNode): + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + 
OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParamStar(BaseMatcherNode): + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ParamMatchType = Union["Param", MetadataMatchType, MatchIfTrue[cst.Param]] +ParamOrParamStarMatchType = Union[ + "Param", + "ParamStar", + MetadataMatchType, + MatchIfTrue[Union[cst.Param, cst.ParamStar]], +] +ParamSlashMatchType = Union[ + "ParamSlash", MetadataMatchType, MatchIfTrue[cst.ParamSlash] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Parameters(BaseMatcherNode): + params: Union[ + Sequence[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Param]], + OneOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + ] = DoNotCare() + star_arg: Union[ + ParamOrParamStarMatchType, + DoNotCareSentinel, + OneOf[ParamOrParamStarMatchType], + AllOf[ParamOrParamStarMatchType], + ] = DoNotCare() + kwonly_params: Union[ + Sequence[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Param]], + OneOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + ] = DoNotCare() + star_kwarg: Union[ + Optional["Param"], + MetadataMatchType, + 
MatchIfTrue[Optional[cst.Param]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] + ] + ], + AllOf[ + Union[ + Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] + ] + ], + ] = DoNotCare() + posonly_params: Union[ + Sequence[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Param]], + OneOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + ] = DoNotCare() + posonly_ind: Union[ + ParamSlashMatchType, + DoNotCareSentinel, + OneOf[ParamSlashMatchType], + AllOf[ParamSlashMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): + first_line: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + empty_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + indent: Union[ + boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] + ] = DoNotCare() + last_line: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + 
AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Pass(BaseSmallStatement, BaseMatcherNode): + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Plus(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Power(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class PowerAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Raise(BaseSmallStatement, BaseMatcherNode): + exc: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + cause: Union[ + Optional["From"], + MetadataMatchType, + MatchIfTrue[Optional[cst.From]], + DoNotCareSentinel, + OneOf[ + Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] + ], + AllOf[ + Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] + ], + ] = DoNotCare() + whitespace_after_raise: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Return(BaseSmallStatement, BaseMatcherNode): + value: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + whitespace_after_return: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightCurlyBrace(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightParen(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightShift(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightShiftAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightSquareBracket(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + 
DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Semicolon(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Set(BaseExpression, BaseSet, BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode): + elt: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseSmallStatementMatchType = Union[ + "BaseSmallStatement", MetadataMatchType, MatchIfTrue[cst.BaseSmallStatement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleStatementLine(BaseStatement, BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, 
+ OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + trailing_whitespace: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleStatementSuite(BaseSuite, BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + ] = DoNotCare() + leading_whitespace: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_whitespace: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + 
+@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleString(BaseExpression, BaseString, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Slice(BaseSlice, BaseMatcherNode): + lower: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + 
DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + upper: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + step: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + first_colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + second_colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class StarredDictElement(BaseDictElement, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_value: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class StarredElement(BaseElement, BaseExpression, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_value: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SubscriptElementMatchType = Union[ + "SubscriptElement", MetadataMatchType, MatchIfTrue[cst.SubscriptElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Subscript( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + slice: Union[ + Sequence[ + Union[ + SubscriptElementMatchType, + DoNotCareSentinel, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + AtLeastN[ + Union[ + SubscriptElementMatchType, + DoNotCareSentinel, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + AtMostN[ + Union[ + SubscriptElementMatchType, + DoNotCareSentinel, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.SubscriptElement]], + OneOf[ + Union[ + Sequence[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + AtLeastN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + AtMostN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.SubscriptElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + 
AllOf[SubscriptElementMatchType], + AtLeastN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + AtMostN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.SubscriptElement]], + ] + ], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_value: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + 
DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseSliceMatchType = Union["BaseSlice", MetadataMatchType, MatchIfTrue[cst.BaseSlice]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SubscriptElement(BaseMatcherNode): + slice: Union[ + BaseSliceMatchType, + DoNotCareSentinel, + OneOf[BaseSliceMatchType], + AllOf[BaseSliceMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Subtract(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SubtractAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TrailingWhitespace(BaseMatcherNode): + whitespace: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + comment: Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + AllOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + ] = DoNotCare() + newline: Union[ + NewlineMatchType, + DoNotCareSentinel, + OneOf[NewlineMatchType], + AllOf[NewlineMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ExceptHandlerMatchType = Union[ + "ExceptHandler", MetadataMatchType, MatchIfTrue[cst.ExceptHandler] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + handlers: Union[ + Sequence[ + Union[ + ExceptHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + AtLeastN[ + Union[ + ExceptHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + AtMostN[ + Union[ + 
ExceptHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ExceptHandler]], + OneOf[ + Union[ + Sequence[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + AtLeastN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptHandler]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + AtLeastN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptHandler]], + ] + ], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + finalbody: Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + AllOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ExceptStarHandlerMatchType = Union[ + "ExceptStarHandler", MetadataMatchType, MatchIfTrue[cst.ExceptStarHandler] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class 
TryStar(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + handlers: Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + OneOf[ + Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + ] + ], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + finalbody: Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + AllOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + 
OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Tuple( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + elements: Union[ + Sequence[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + 
DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseUnaryOpMatchType = Union[ + "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class UnaryOperation(BaseExpression, BaseMatcherNode): + operator: Union[ + BaseUnaryOpMatchType, + DoNotCareSentinel, + OneOf[BaseUnaryOpMatchType], + AllOf[BaseUnaryOpMatchType], + ] = DoNotCare() + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + 
RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_while: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +WithItemMatchType = Union["WithItem", MetadataMatchType, MatchIfTrue[cst.WithItem]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + items: Union[ + 
Sequence[ + Union[ + WithItemMatchType, + DoNotCareSentinel, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + AtLeastN[ + Union[ + WithItemMatchType, + DoNotCareSentinel, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + AtMostN[ + Union[ + WithItemMatchType, + DoNotCareSentinel, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.WithItem]], + OneOf[ + Union[ + Sequence[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + AtLeastN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + AtMostN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.WithItem]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + AtLeastN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + AtMostN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.WithItem]], + ] + ], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + lpar: Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] = DoNotCare() + rpar: Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] = DoNotCare() + whitespace_after_with: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class WithItem(BaseMatcherNode): + item: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + asname: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseExpressionOrFromOrNoneMatchType = Union[ + "BaseExpression", + "From", + None, + MetadataMatchType, + MatchIfTrue[Union[cst.BaseExpression, cst.From, None]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Yield(BaseExpression, BaseMatcherNode): + value: Union[ + BaseExpressionOrFromOrNoneMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionOrFromOrNoneMatchType], + AllOf[BaseExpressionOrFromOrNoneMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + 
RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_yield: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +__all__ = [ + "Add", + "AddAssign", + "AllOf", + "And", + "AnnAssign", + "Annotation", + "Arg", + "AsName", + "Assert", + "Assign", + "AssignEqual", + "AssignTarget", + "Asynchronous", + "AtLeastN", + "AtMostN", + "Attribute", + "AugAssign", + "Await", + "BaseAssignTargetExpression", + "BaseAugOp", + "BaseBinaryOp", + "BaseBooleanOp", + "BaseComp", + "BaseCompOp", + "BaseCompoundStatement", + "BaseDelTargetExpression", + "BaseDict", + "BaseDictElement", + "BaseElement", + "BaseExpression", + "BaseFormattedStringContent", + "BaseList", + "BaseMatcherNode", + "BaseMetadataProvider", + "BaseNumber", + "BaseParenthesizableWhitespace", + "BaseSet", + "BaseSimpleComp", + "BaseSlice", + "BaseSmallStatement", + "BaseStatement", + "BaseString", + "BaseSuite", + "BaseUnaryOp", + "BinaryOperation", + "BitAnd", + "BitAndAssign", + "BitInvert", + "BitOr", + "BitOrAssign", + "BitXor", + "BitXorAssign", + "BooleanOperation", + "Break", + "Call", + "ClassDef", + "Colon", + "Comma", + "Comment", + "CompFor", + "CompIf", + "Comparison", + "ComparisonTarget", + "ConcatenatedString", + "Continue", + "Decorator", + "Del", + "Dict", + "DictComp", + "DictElement", + "Divide", + "DivideAssign", + "DoNotCare", + "DoNotCareSentinel", + "DoesNotMatch", + "Dot", + "Element", + "Ellipsis", + "Else", + "EmptyLine", + "Equal", + "ExceptHandler", + "ExceptStarHandler", + "Expr", + "Finally", + "Float", + "FloorDivide", + "FloorDivideAssign", + "For", + "FormattedString", + "FormattedStringExpression", + "FormattedStringText", + "From", + "FunctionDef", + "GeneratorExp", + "Global", + "GreaterThan", + "GreaterThanEqual", + "If", + "IfExp", + "Imaginary", + "Import", + "ImportAlias", + "ImportFrom", + "ImportStar", + "In", + "IndentedBlock", + "Index", + "Integer", + "Is", + "IsNot", + "Lambda", + "LeftCurlyBrace", + "LeftParen", + "LeftShift", + "LeftShiftAssign", + "LeftSquareBracket", + "LessThan", + "LessThanEqual", + "List", + "ListComp", + "Match", + "MatchAs", + "MatchCase", + "MatchClass", + "MatchDecoratorMismatch", + "MatchIfTrue", + "MatchKeywordElement", + "MatchList", + "MatchMapping", + "MatchMappingElement", + "MatchMetadata", + "MatchMetadataIfTrue", + "MatchOr", + "MatchOrElement", + "MatchPattern", + "MatchRegex", + "MatchSequence", + "MatchSequenceElement", + "MatchSingleton", + "MatchStar", + "MatchTuple", + "MatchValue", + "MatcherDecoratableTransformer", + "MatcherDecoratableVisitor", + "MatrixMultiply", + "MatrixMultiplyAssign", + "Minus", + "Module", + "Modulo", + "ModuloAssign", + "Multiply", + "MultiplyAssign", + "Name", + "NameItem", + "NamedExpr", + "Newline", + "Nonlocal", + "Not", + 
"NotEqual", + "NotIn", + "OneOf", + "Or", + "Param", + "ParamSlash", + "ParamStar", + "Parameters", + "ParenthesizedWhitespace", + "Pass", + "Plus", + "Power", + "PowerAssign", + "Raise", + "Return", + "RightCurlyBrace", + "RightParen", + "RightShift", + "RightShiftAssign", + "RightSquareBracket", + "SaveMatchedNode", + "Semicolon", + "Set", + "SetComp", + "SimpleStatementLine", + "SimpleStatementSuite", + "SimpleString", + "SimpleWhitespace", + "Slice", + "StarredDictElement", + "StarredElement", + "Subscript", + "SubscriptElement", + "Subtract", + "SubtractAssign", + "TrailingWhitespace", + "Try", + "TryStar", + "Tuple", + "TypeOf", + "UnaryOperation", + "While", + "With", + "WithItem", + "Yield", + "ZeroOrMore", + "ZeroOrOne", + "call_if_inside", + "call_if_not_inside", + "extract", + "extractall", + "findall", + "leave", + "matches", + "replace", + "visit", +] diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 57162632..87475d05 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -1,363 +1,363 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - - -# This file was generated by libcst.codegen.gen_type_mapping -from typing import Dict as TypingDict, Type, Union - -from libcst._maybe_sentinel import MaybeSentinel -from libcst._nodes.base import CSTNode -from libcst._nodes.expression import ( - Annotation, - Arg, - Asynchronous, - Attribute, - Await, - BaseDictElement, - BaseElement, - BaseExpression, - BaseFormattedStringContent, - BaseSlice, - BinaryOperation, - BooleanOperation, - Call, - Comparison, - ComparisonTarget, - CompFor, - CompIf, - ConcatenatedString, - Dict, - DictComp, - DictElement, - Element, - Ellipsis, - Float, - FormattedString, - FormattedStringExpression, - FormattedStringText, - From, - GeneratorExp, - IfExp, - Imaginary, - Index, - Integer, - Lambda, - LeftCurlyBrace, - LeftParen, - LeftSquareBracket, - List, - ListComp, - Name, - NamedExpr, - Param, - Parameters, - ParamSlash, - ParamStar, - RightCurlyBrace, - RightParen, - RightSquareBracket, - Set, - SetComp, - SimpleString, - Slice, - StarredDictElement, - StarredElement, - Subscript, - SubscriptElement, - Tuple, - UnaryOperation, - Yield, -) -from libcst._nodes.module import Module - -from libcst._nodes.op import ( - Add, - AddAssign, - And, - AssignEqual, - BaseAugOp, - BaseBinaryOp, - BaseBooleanOp, - BaseCompOp, - BaseUnaryOp, - BitAnd, - BitAndAssign, - BitInvert, - BitOr, - BitOrAssign, - BitXor, - BitXorAssign, - Colon, - Comma, - Divide, - DivideAssign, - Dot, - Equal, - FloorDivide, - FloorDivideAssign, - GreaterThan, - GreaterThanEqual, - ImportStar, - In, - Is, - IsNot, - LeftShift, - LeftShiftAssign, - LessThan, - LessThanEqual, - MatrixMultiply, - MatrixMultiplyAssign, - Minus, - Modulo, - ModuloAssign, - Multiply, - MultiplyAssign, - Not, - NotEqual, - NotIn, - Or, - Plus, - Power, - PowerAssign, - RightShift, - RightShiftAssign, - Semicolon, - Subtract, - SubtractAssign, -) -from libcst._nodes.statement import ( - AnnAssign, - AsName, - Assert, - Assign, - AssignTarget, - AugAssign, - BaseSmallStatement, - BaseStatement, - BaseSuite, - Break, - ClassDef, - Continue, - Decorator, - Del, - Else, - ExceptHandler, - ExceptStarHandler, - Expr, - Finally, - For, - FunctionDef, - Global, - If, - Import, - ImportAlias, - ImportFrom, - IndentedBlock, - Match, - MatchAs, - MatchCase, - MatchClass, - 
MatchKeywordElement, - MatchList, - MatchMapping, - MatchMappingElement, - MatchOr, - MatchOrElement, - MatchPattern, - MatchSequence, - MatchSequenceElement, - MatchSingleton, - MatchStar, - MatchTuple, - MatchValue, - NameItem, - Nonlocal, - Pass, - Raise, - Return, - SimpleStatementLine, - SimpleStatementSuite, - Try, - TryStar, - While, - With, - WithItem, -) -from libcst._nodes.whitespace import ( - BaseParenthesizableWhitespace, - Comment, - EmptyLine, - Newline, - ParenthesizedWhitespace, - SimpleWhitespace, - TrailingWhitespace, -) -from libcst._removal_sentinel import RemovalSentinel - - -TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { - Add: BaseBinaryOp, - AddAssign: BaseAugOp, - And: BaseBooleanOp, - AnnAssign: Union[BaseSmallStatement, RemovalSentinel], - Annotation: Annotation, - Arg: Union[Arg, RemovalSentinel], - AsName: AsName, - Assert: Union[BaseSmallStatement, RemovalSentinel], - Assign: Union[BaseSmallStatement, RemovalSentinel], - AssignEqual: Union[AssignEqual, MaybeSentinel], - AssignTarget: Union[AssignTarget, RemovalSentinel], - Asynchronous: Asynchronous, - Attribute: BaseExpression, - AugAssign: Union[BaseSmallStatement, RemovalSentinel], - Await: BaseExpression, - BinaryOperation: BaseExpression, - BitAnd: BaseBinaryOp, - BitAndAssign: BaseAugOp, - BitInvert: BaseUnaryOp, - BitOr: Union[BaseBinaryOp, MaybeSentinel], - BitOrAssign: BaseAugOp, - BitXor: BaseBinaryOp, - BitXorAssign: BaseAugOp, - BooleanOperation: BaseExpression, - Break: Union[BaseSmallStatement, RemovalSentinel], - Call: BaseExpression, - ClassDef: Union[BaseStatement, RemovalSentinel], - Colon: Union[Colon, MaybeSentinel], - Comma: Union[Comma, MaybeSentinel], - Comment: Comment, - CompFor: CompFor, - CompIf: CompIf, - Comparison: BaseExpression, - ComparisonTarget: Union[ComparisonTarget, RemovalSentinel], - ConcatenatedString: BaseExpression, - Continue: Union[BaseSmallStatement, RemovalSentinel], - Decorator: Union[Decorator, RemovalSentinel], - Del: Union[BaseSmallStatement, RemovalSentinel], - Dict: BaseExpression, - DictComp: BaseExpression, - DictElement: Union[BaseDictElement, RemovalSentinel], - Divide: BaseBinaryOp, - DivideAssign: BaseAugOp, - Dot: Union[Dot, RemovalSentinel], - Element: Union[BaseElement, RemovalSentinel], - Ellipsis: BaseExpression, - Else: Else, - EmptyLine: Union[EmptyLine, RemovalSentinel], - Equal: BaseCompOp, - ExceptHandler: Union[ExceptHandler, RemovalSentinel], - ExceptStarHandler: Union[ExceptStarHandler, RemovalSentinel], - Expr: Union[BaseSmallStatement, RemovalSentinel], - Finally: Finally, - Float: BaseExpression, - FloorDivide: BaseBinaryOp, - FloorDivideAssign: BaseAugOp, - For: Union[BaseStatement, RemovalSentinel], - FormattedString: BaseExpression, - FormattedStringExpression: Union[BaseFormattedStringContent, RemovalSentinel], - FormattedStringText: Union[BaseFormattedStringContent, RemovalSentinel], - From: From, - FunctionDef: Union[BaseStatement, RemovalSentinel], - GeneratorExp: BaseExpression, - Global: Union[BaseSmallStatement, RemovalSentinel], - GreaterThan: BaseCompOp, - GreaterThanEqual: BaseCompOp, - If: Union[BaseStatement, RemovalSentinel], - IfExp: BaseExpression, - Imaginary: BaseExpression, - Import: Union[BaseSmallStatement, RemovalSentinel], - ImportAlias: Union[ImportAlias, RemovalSentinel], - ImportFrom: Union[BaseSmallStatement, RemovalSentinel], - ImportStar: ImportStar, - In: BaseCompOp, - IndentedBlock: BaseSuite, - Index: BaseSlice, - Integer: BaseExpression, - Is: BaseCompOp, - IsNot: BaseCompOp, - 
Lambda: BaseExpression, - LeftCurlyBrace: LeftCurlyBrace, - LeftParen: Union[LeftParen, MaybeSentinel, RemovalSentinel], - LeftShift: BaseBinaryOp, - LeftShiftAssign: BaseAugOp, - LeftSquareBracket: LeftSquareBracket, - LessThan: BaseCompOp, - LessThanEqual: BaseCompOp, - List: BaseExpression, - ListComp: BaseExpression, - Match: Union[BaseStatement, RemovalSentinel], - MatchAs: MatchPattern, - MatchCase: MatchCase, - MatchClass: MatchPattern, - MatchKeywordElement: Union[MatchKeywordElement, RemovalSentinel], - MatchList: MatchPattern, - MatchMapping: MatchPattern, - MatchMappingElement: Union[MatchMappingElement, RemovalSentinel], - MatchOr: MatchPattern, - MatchOrElement: Union[MatchOrElement, RemovalSentinel], - MatchPattern: MatchPattern, - MatchSequence: MatchPattern, - MatchSequenceElement: Union[MatchSequenceElement, RemovalSentinel], - MatchSingleton: MatchPattern, - MatchStar: MatchStar, - MatchTuple: MatchPattern, - MatchValue: MatchPattern, - MatrixMultiply: BaseBinaryOp, - MatrixMultiplyAssign: BaseAugOp, - Minus: BaseUnaryOp, - Module: Module, - Modulo: BaseBinaryOp, - ModuloAssign: BaseAugOp, - Multiply: BaseBinaryOp, - MultiplyAssign: BaseAugOp, - Name: BaseExpression, - NameItem: Union[NameItem, RemovalSentinel], - NamedExpr: BaseExpression, - Newline: Newline, - Nonlocal: Union[BaseSmallStatement, RemovalSentinel], - Not: BaseUnaryOp, - NotEqual: BaseCompOp, - NotIn: BaseCompOp, - Or: BaseBooleanOp, - Param: Union[Param, MaybeSentinel, RemovalSentinel], - ParamSlash: Union[ParamSlash, MaybeSentinel], - ParamStar: Union[ParamStar, MaybeSentinel], - Parameters: Parameters, - ParenthesizedWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], - Pass: Union[BaseSmallStatement, RemovalSentinel], - Plus: BaseUnaryOp, - Power: BaseBinaryOp, - PowerAssign: BaseAugOp, - Raise: Union[BaseSmallStatement, RemovalSentinel], - Return: Union[BaseSmallStatement, RemovalSentinel], - RightCurlyBrace: RightCurlyBrace, - RightParen: Union[RightParen, MaybeSentinel, RemovalSentinel], - RightShift: BaseBinaryOp, - RightShiftAssign: BaseAugOp, - RightSquareBracket: RightSquareBracket, - Semicolon: Union[Semicolon, MaybeSentinel], - Set: BaseExpression, - SetComp: BaseExpression, - SimpleStatementLine: Union[BaseStatement, RemovalSentinel], - SimpleStatementSuite: BaseSuite, - SimpleString: BaseExpression, - SimpleWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], - Slice: BaseSlice, - StarredDictElement: Union[BaseDictElement, RemovalSentinel], - StarredElement: BaseExpression, - Subscript: BaseExpression, - SubscriptElement: Union[SubscriptElement, RemovalSentinel], - Subtract: BaseBinaryOp, - SubtractAssign: BaseAugOp, - TrailingWhitespace: TrailingWhitespace, - Try: Union[BaseStatement, RemovalSentinel], - TryStar: Union[BaseStatement, RemovalSentinel], - Tuple: BaseExpression, - UnaryOperation: BaseExpression, - While: Union[BaseStatement, RemovalSentinel], - With: Union[BaseStatement, RemovalSentinel], - WithItem: Union[WithItem, RemovalSentinel], - Yield: BaseExpression, -} +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ + +# This file was generated by libcst.codegen.gen_type_mapping +from typing import Dict as TypingDict, Type, Union + +from libcst._maybe_sentinel import MaybeSentinel +from libcst._nodes.base import CSTNode +from libcst._nodes.expression import ( + Annotation, + Arg, + Asynchronous, + Attribute, + Await, + BaseDictElement, + BaseElement, + BaseExpression, + BaseFormattedStringContent, + BaseSlice, + BinaryOperation, + BooleanOperation, + Call, + Comparison, + ComparisonTarget, + CompFor, + CompIf, + ConcatenatedString, + Dict, + DictComp, + DictElement, + Element, + Ellipsis, + Float, + FormattedString, + FormattedStringExpression, + FormattedStringText, + From, + GeneratorExp, + IfExp, + Imaginary, + Index, + Integer, + Lambda, + LeftCurlyBrace, + LeftParen, + LeftSquareBracket, + List, + ListComp, + Name, + NamedExpr, + Param, + Parameters, + ParamSlash, + ParamStar, + RightCurlyBrace, + RightParen, + RightSquareBracket, + Set, + SetComp, + SimpleString, + Slice, + StarredDictElement, + StarredElement, + Subscript, + SubscriptElement, + Tuple, + UnaryOperation, + Yield, +) +from libcst._nodes.module import Module + +from libcst._nodes.op import ( + Add, + AddAssign, + And, + AssignEqual, + BaseAugOp, + BaseBinaryOp, + BaseBooleanOp, + BaseCompOp, + BaseUnaryOp, + BitAnd, + BitAndAssign, + BitInvert, + BitOr, + BitOrAssign, + BitXor, + BitXorAssign, + Colon, + Comma, + Divide, + DivideAssign, + Dot, + Equal, + FloorDivide, + FloorDivideAssign, + GreaterThan, + GreaterThanEqual, + ImportStar, + In, + Is, + IsNot, + LeftShift, + LeftShiftAssign, + LessThan, + LessThanEqual, + MatrixMultiply, + MatrixMultiplyAssign, + Minus, + Modulo, + ModuloAssign, + Multiply, + MultiplyAssign, + Not, + NotEqual, + NotIn, + Or, + Plus, + Power, + PowerAssign, + RightShift, + RightShiftAssign, + Semicolon, + Subtract, + SubtractAssign, +) +from libcst._nodes.statement import ( + AnnAssign, + AsName, + Assert, + Assign, + AssignTarget, + AugAssign, + BaseSmallStatement, + BaseStatement, + BaseSuite, + Break, + ClassDef, + Continue, + Decorator, + Del, + Else, + ExceptHandler, + ExceptStarHandler, + Expr, + Finally, + For, + FunctionDef, + Global, + If, + Import, + ImportAlias, + ImportFrom, + IndentedBlock, + Match, + MatchAs, + MatchCase, + MatchClass, + MatchKeywordElement, + MatchList, + MatchMapping, + MatchMappingElement, + MatchOr, + MatchOrElement, + MatchPattern, + MatchSequence, + MatchSequenceElement, + MatchSingleton, + MatchStar, + MatchTuple, + MatchValue, + NameItem, + Nonlocal, + Pass, + Raise, + Return, + SimpleStatementLine, + SimpleStatementSuite, + Try, + TryStar, + While, + With, + WithItem, +) +from libcst._nodes.whitespace import ( + BaseParenthesizableWhitespace, + Comment, + EmptyLine, + Newline, + ParenthesizedWhitespace, + SimpleWhitespace, + TrailingWhitespace, +) +from libcst._removal_sentinel import RemovalSentinel + + +TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { + Add: BaseBinaryOp, + AddAssign: BaseAugOp, + And: BaseBooleanOp, + AnnAssign: Union[BaseSmallStatement, RemovalSentinel], + Annotation: Annotation, + Arg: Union[Arg, RemovalSentinel], + AsName: AsName, + Assert: Union[BaseSmallStatement, RemovalSentinel], + Assign: Union[BaseSmallStatement, RemovalSentinel], + AssignEqual: Union[AssignEqual, MaybeSentinel], + AssignTarget: Union[AssignTarget, RemovalSentinel], + Asynchronous: Asynchronous, + Attribute: BaseExpression, + AugAssign: Union[BaseSmallStatement, RemovalSentinel], + Await: BaseExpression, + BinaryOperation: BaseExpression, + 
BitAnd: BaseBinaryOp, + BitAndAssign: BaseAugOp, + BitInvert: BaseUnaryOp, + BitOr: Union[BaseBinaryOp, MaybeSentinel], + BitOrAssign: BaseAugOp, + BitXor: BaseBinaryOp, + BitXorAssign: BaseAugOp, + BooleanOperation: BaseExpression, + Break: Union[BaseSmallStatement, RemovalSentinel], + Call: BaseExpression, + ClassDef: Union[BaseStatement, RemovalSentinel], + Colon: Union[Colon, MaybeSentinel], + Comma: Union[Comma, MaybeSentinel], + Comment: Comment, + CompFor: CompFor, + CompIf: CompIf, + Comparison: BaseExpression, + ComparisonTarget: Union[ComparisonTarget, RemovalSentinel], + ConcatenatedString: BaseExpression, + Continue: Union[BaseSmallStatement, RemovalSentinel], + Decorator: Union[Decorator, RemovalSentinel], + Del: Union[BaseSmallStatement, RemovalSentinel], + Dict: BaseExpression, + DictComp: BaseExpression, + DictElement: Union[BaseDictElement, RemovalSentinel], + Divide: BaseBinaryOp, + DivideAssign: BaseAugOp, + Dot: Union[Dot, RemovalSentinel], + Element: Union[BaseElement, RemovalSentinel], + Ellipsis: BaseExpression, + Else: Else, + EmptyLine: Union[EmptyLine, RemovalSentinel], + Equal: BaseCompOp, + ExceptHandler: Union[ExceptHandler, RemovalSentinel], + ExceptStarHandler: Union[ExceptStarHandler, RemovalSentinel], + Expr: Union[BaseSmallStatement, RemovalSentinel], + Finally: Finally, + Float: BaseExpression, + FloorDivide: BaseBinaryOp, + FloorDivideAssign: BaseAugOp, + For: Union[BaseStatement, RemovalSentinel], + FormattedString: BaseExpression, + FormattedStringExpression: Union[BaseFormattedStringContent, RemovalSentinel], + FormattedStringText: Union[BaseFormattedStringContent, RemovalSentinel], + From: From, + FunctionDef: Union[BaseStatement, RemovalSentinel], + GeneratorExp: BaseExpression, + Global: Union[BaseSmallStatement, RemovalSentinel], + GreaterThan: BaseCompOp, + GreaterThanEqual: BaseCompOp, + If: Union[BaseStatement, RemovalSentinel], + IfExp: BaseExpression, + Imaginary: BaseExpression, + Import: Union[BaseSmallStatement, RemovalSentinel], + ImportAlias: Union[ImportAlias, RemovalSentinel], + ImportFrom: Union[BaseSmallStatement, RemovalSentinel], + ImportStar: ImportStar, + In: BaseCompOp, + IndentedBlock: BaseSuite, + Index: BaseSlice, + Integer: BaseExpression, + Is: BaseCompOp, + IsNot: BaseCompOp, + Lambda: BaseExpression, + LeftCurlyBrace: LeftCurlyBrace, + LeftParen: Union[LeftParen, MaybeSentinel, RemovalSentinel], + LeftShift: BaseBinaryOp, + LeftShiftAssign: BaseAugOp, + LeftSquareBracket: LeftSquareBracket, + LessThan: BaseCompOp, + LessThanEqual: BaseCompOp, + List: BaseExpression, + ListComp: BaseExpression, + Match: Union[BaseStatement, RemovalSentinel], + MatchAs: MatchPattern, + MatchCase: MatchCase, + MatchClass: MatchPattern, + MatchKeywordElement: Union[MatchKeywordElement, RemovalSentinel], + MatchList: MatchPattern, + MatchMapping: MatchPattern, + MatchMappingElement: Union[MatchMappingElement, RemovalSentinel], + MatchOr: MatchPattern, + MatchOrElement: Union[MatchOrElement, RemovalSentinel], + MatchPattern: MatchPattern, + MatchSequence: MatchPattern, + MatchSequenceElement: Union[MatchSequenceElement, RemovalSentinel], + MatchSingleton: MatchPattern, + MatchStar: MatchStar, + MatchTuple: MatchPattern, + MatchValue: MatchPattern, + MatrixMultiply: BaseBinaryOp, + MatrixMultiplyAssign: BaseAugOp, + Minus: BaseUnaryOp, + Module: Module, + Modulo: BaseBinaryOp, + ModuloAssign: BaseAugOp, + Multiply: BaseBinaryOp, + MultiplyAssign: BaseAugOp, + Name: BaseExpression, + NameItem: Union[NameItem, RemovalSentinel], + NamedExpr: 
BaseExpression, + Newline: Newline, + Nonlocal: Union[BaseSmallStatement, RemovalSentinel], + Not: BaseUnaryOp, + NotEqual: BaseCompOp, + NotIn: BaseCompOp, + Or: BaseBooleanOp, + Param: Union[Param, MaybeSentinel, RemovalSentinel], + ParamSlash: Union[ParamSlash, MaybeSentinel], + ParamStar: Union[ParamStar, MaybeSentinel], + Parameters: Parameters, + ParenthesizedWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], + Pass: Union[BaseSmallStatement, RemovalSentinel], + Plus: BaseUnaryOp, + Power: BaseBinaryOp, + PowerAssign: BaseAugOp, + Raise: Union[BaseSmallStatement, RemovalSentinel], + Return: Union[BaseSmallStatement, RemovalSentinel], + RightCurlyBrace: RightCurlyBrace, + RightParen: Union[RightParen, MaybeSentinel, RemovalSentinel], + RightShift: BaseBinaryOp, + RightShiftAssign: BaseAugOp, + RightSquareBracket: RightSquareBracket, + Semicolon: Union[Semicolon, MaybeSentinel], + Set: BaseExpression, + SetComp: BaseExpression, + SimpleStatementLine: Union[BaseStatement, RemovalSentinel], + SimpleStatementSuite: BaseSuite, + SimpleString: BaseExpression, + SimpleWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], + Slice: BaseSlice, + StarredDictElement: Union[BaseDictElement, RemovalSentinel], + StarredElement: BaseExpression, + Subscript: BaseExpression, + SubscriptElement: Union[SubscriptElement, RemovalSentinel], + Subtract: BaseBinaryOp, + SubtractAssign: BaseAugOp, + TrailingWhitespace: TrailingWhitespace, + Try: Union[BaseStatement, RemovalSentinel], + TryStar: Union[BaseStatement, RemovalSentinel], + Tuple: BaseExpression, + UnaryOperation: BaseExpression, + While: Union[BaseStatement, RemovalSentinel], + With: Union[BaseStatement, RemovalSentinel], + WithItem: Union[WithItem, RemovalSentinel], + Yield: BaseExpression, +} diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 14622c1a..5e23357d 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -1004,7 +1004,7 @@ parser! { make_slice(l, col, u, rest) } / e:starred_expression() { make_index_from_arg(e) } - / v:expression() { make_index(v) } + / v:named_expression() { make_index(v) } rule atom() -> Expression<'input, 'a> = n:name() { Expression::Name(Box::new(n)) } diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 7c0f0788..d470a78a 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -114,12 +114,8 @@ pub enum TokType { Dedent, Async, Await, - // TODO; add support for these - #[allow(dead_code)] FStringStart, - #[allow(dead_code)] FStringString, - #[allow(dead_code)] FStringEnd, EndMarker, } diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs index 8f9e0cf0..d14d13f5 100644 --- a/native/libcst/src/tokenizer/core/string_types.rs +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -98,11 +98,10 @@ impl FStringNode { } pub fn close_parentheses(&mut self) { - self.parentheses_count -= 1; - if self.parentheses_count == 0 { - // No parentheses means that the format spec is also finished. 
- self.format_spec_count = 0; + if self.is_in_format_spec() { + self.format_spec_count -= 1; } + self.parentheses_count -= 1; } pub fn allow_multiline(&self) -> bool { diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 4e8ce4d3..08d26e55 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -814,3 +814,29 @@ fn test_inconsistent_indentation_at_eof() { ]) ) } + +#[test] +fn test_nested_f_string_specs() { + let config = TokConfig { + split_fstring: true, + ..default_config() + }; + assert_eq!( + tokenize_all("f'{_:{_:}{_}}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::Op, "{"), + (TokType::Name, "_"), + (TokType::Op, ":"), + (TokType::Op, "{"), + (TokType::Name, "_"), + (TokType::Op, ":"), + (TokType::Op, "}"), + (TokType::Op, "{"), + (TokType::Name, "_"), + (TokType::Op, "}"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'") + ]) + ) +} diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs index 9c394d52..2e58600a 100644 --- a/native/libcst/src/tokenizer/text_position/mod.rs +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -117,6 +117,10 @@ impl<'t> TextPosition<'t> { .inner_char_column_number .checked_sub(1) .expect("cannot back up past the beginning of a line."); + self.inner_byte_column_number = self + .inner_byte_column_number + .checked_sub(cw.byte_width) + .expect("cannot back up past the beginning of a line."); self.inner_byte_idx -= cw.byte_width; } else { panic!("Tried to backup past the beginning of the text.") @@ -217,6 +221,7 @@ impl fmt::Debug for TextPosition<'_> { .field("char_widths", &EllipsisDebug) .field("inner_byte_idx", &self.inner_byte_idx) .field("inner_char_column_number", &self.inner_char_column_number) + .field("inner_byte_column_number", &self.inner_byte_column_number) .field("inner_line_number", &self.inner_line_number) .finish() } diff --git a/native/libcst/tests/fixtures/expr.py b/native/libcst/tests/fixtures/expr.py index c1c4e9b7..abb78ab9 100644 --- a/native/libcst/tests/fixtures/expr.py +++ b/native/libcst/tests/fixtures/expr.py @@ -44,6 +44,7 @@ lambda a, b, c=True, *vararg, d=(v1 << 2), e='str', **kwargs : a + b manylambdas = lambda x=lambda y=lambda z=1: z: y(): x() foo = (lambda port_id, ignore_missing: {"port1": port1_resource, "port2": port2_resource}[port_id]) 1 if True else 2 +_ if 0else _ str or None if True else str or bytes or None (str or None) if True else (str or bytes or None) str or None if (1 if True else 2) else str or bytes or None diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py index d993f5ab..82457279 100644 --- a/native/libcst/tests/fixtures/super_strings.py +++ b/native/libcst/tests/fixtures/super_strings.py @@ -30,3 +30,5 @@ print(f"{self.ERASE_CURRENT_LINE}{self._human_seconds(elapsed_time)} {percent:.{ f'\{{\}}' f"regexp_like(path, '.*\{file_type}$')" f"\lfoo" + +f"{_:{_:}{a}}" \ No newline at end of file diff --git a/native/libcst/tests/fixtures/wonky_walrus.py b/native/libcst/tests/fixtures/wonky_walrus.py index d0916ab8..d506b169 100644 --- a/native/libcst/tests/fixtures/wonky_walrus.py +++ b/native/libcst/tests/fixtures/wonky_walrus.py @@ -10,4 +10,6 @@ while f := x(): if f := x(): pass f(y:=1) -f(x, y := 1 ) \ No newline at end of file +f(x, y := 1 ) + +_[_:=10] \ No newline at end of file From 5eec991ef346ba5a8a8a72d9677aa6703a65b4ff Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 
7 Jun 2023 13:40:34 +0200 Subject: [PATCH 381/632] Fix parsing of code without trailing newlines (#940) When the input doesn't have a trailing newline, but the last line had exactly the amount of bytes as the current indentation level, the tokenizer didn't emit a fake newline, causing parse errors (the grammar expects newlines to conform with the Python spec). I don't see any reason for fake newlines to be omitted in these cases, so this PR removes that condition from the tokenizer. Reported in #930. --- native/libcst/src/lib.rs | 16 ++++++++++++++++ native/libcst/src/tokenizer/core/mod.rs | 5 +---- native/libcst/src/tokenizer/tests.rs | 13 +++++++++++++ 3 files changed, 30 insertions(+), 4 deletions(-) diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index 6d2bdd99..9bd6bb99 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -149,6 +149,22 @@ mod test { parse_module("def g(a, b): ...", None).expect("parse error"); } + #[test] + fn test_single_statement_with_no_newline() { + for src in &[ + "(\n \\\n)", + "(\n \\\n)", + "(\n '''\n''')", + "del _", + "if _:\n '''\n)'''", + "if _:\n ('''\n''')", + "if _:\n '''\n '''", + "if _:\n '''\n ''' ", + ] { + parse_module(src, None).unwrap_or_else(|e| panic!("'{}' doesn't parse: {}", src, e)); + } + } + #[test] fn bol_offset_first_line() { assert_eq!(0, bol_offset("hello", 1)); diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index d470a78a..359451a3 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -330,10 +330,7 @@ impl<'t> TokState<'t> { return match self.text_pos.peek() { // Check for EOF now None => { - if self.missing_nl_before_eof - && self.text_pos.byte_column_number() != self.bol_width - && !self.blank_line - { + if self.missing_nl_before_eof && !self.blank_line { self.at_bol = true; self.missing_nl_before_eof = false; Ok(TokType::Newline) diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 08d26e55..a24b977b 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -718,6 +718,19 @@ fn test_fake_newline() { ); } +#[test] +fn test_fake_newline_when_at_bol() { + assert_eq!( + tokenize_with_end_marker("(\n \\\n)", &default_config()), + Ok(vec![ + (TokType::Op, "("), + (TokType::Op, ")"), + (TokType::Newline, ""), + (TokType::EndMarker, "") + ]) + ) +} + #[test] fn test_no_fake_newline_for_empty_input() { assert_eq!( From 203a2f5bc58c9570fd809b34dd0f92038d81bba2 Mon Sep 17 00:00:00 2001 From: Sergii Dymchenko Date: Wed, 7 Jun 2023 04:45:53 -0700 Subject: [PATCH 382/632] Codemod CLI: Print diff only when there is a change (#945) Otherwise lots of empty lines are printed. --- libcst/codemod/_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 91b85c4d..61e34e1c 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -473,7 +473,7 @@ def _print_parallel_result( ) # In unified diff mode, the code is a diff we must print. 
- if unified_diff: + if unified_diff and result.code: print(result.code) From 3cacca1a1029f05707e50703b49fe3dd860aa839 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 7 Jun 2023 13:26:02 +0100 Subject: [PATCH 383/632] Update changelog to 1.0.1 --- CHANGELOG.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aca687c2..12eed756 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,19 @@ +# 1.0.1 - 2023-06-07 + +## Fixed +* Fix type of `evaluated_value` on string to allow bytes by @ljodal in https://github.com/Instagram/LibCST/pull/721 +* Fix Sentinal typo by @kit1980 in https://github.com/Instagram/LibCST/pull/948 +* Allow no whitespace after lambda body in certain cases by @zsol in https://github.com/Instagram/LibCST/pull/939 +* Fix whitespace, fstring, walrus related parse errors (#939, #938, #937, +#936, #935, #934, #933, #932, #931) by @zsol in https://github.com/Instagram/LibCST/pull/940 +* Codemod CLI: Print diff only when there is a change by @kit1980 in https://github.com/Instagram/LibCST/pull/945 + +## New Contributors +* @ljodal made their first contribution in https://github.com/Instagram/LibCST/pull/721 +* @kit1980 made their first contribution in https://github.com/Instagram/LibCST/pull/948 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.0.0...v1.0.1 + # 1.0.0 - 2023-05-25 The first major release of LibCST is essentially the same as 0.4.10, but using the From 50d48c1539639ab8f054a3fb678db5c5dee120af Mon Sep 17 00:00:00 2001 From: Martin DeMello Date: Wed, 14 Jun 2023 01:30:56 -0700 Subject: [PATCH 384/632] Do not annotate the same variable multiple times in ApplyTypeAnnotationsVisitor (#956) --- .../visitors/_apply_type_annotations.py | 26 +++++---- .../tests/test_apply_type_annotations.py | 55 +++++++++++++++++++ 2 files changed, 71 insertions(+), 10 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 2f2e3ac9..9c826cc4 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -694,6 +694,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # quotations to avoid undefined forward references in type annotations. 
self.global_names: Set[str] = set() + # We use this to avoid annotating multiple assignments to the same + # symbol in a given scope + self.already_annotated: Set[str] = set() + @staticmethod def store_stub_in_context( context: CodemodContext, @@ -945,17 +949,19 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): name = get_full_name_for_node(only_target) if name is not None: self.qualifier.append(name) - if ( - self._qualifier_name() in self.annotations.attributes - and not isinstance(only_target, (cst.Attribute, cst.Subscript)) + qualifier_name = self._qualifier_name() + if qualifier_name in self.annotations.attributes and not isinstance( + only_target, (cst.Attribute, cst.Subscript) ): - annotation = self.annotations.attributes[self._qualifier_name()] - self.qualifier.pop() - return self._apply_annotation_to_attribute_or_global( - name=name, - annotation=annotation, - value=node.value, - ) + if qualifier_name not in self.already_annotated: + self.already_annotated.add(qualifier_name) + annotation = self.annotations.attributes[qualifier_name] + self.qualifier.pop() + return self._apply_annotation_to_attribute_or_global( + name=name, + annotation=annotation, + value=node.value, + ) else: self.qualifier.pop() return updated_node diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index 3ef92aed..c7ea5c22 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1910,3 +1910,58 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) def test_valid_assign_expressions(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) + + @data_provider( + { + "toplevel": ( + """ + x: int + """, + """ + x = 1 + x = 2 + """, + """ + x: int = 1 + x = 2 + """, + ), + "class": ( + """ + class A: + x: int + """, + """ + class A: + x = 1 + x = 2 + """, + """ + class A: + x: int = 1 + x = 2 + """, + ), + "mixed": ( + """ + x: int + class A: + x: int + """, + """ + x = 1 + class A: + x = 1 + x = 2 + """, + """ + x: int = 1 + class A: + x: int = 1 + x = 2 + """, + ), + } + ) + def test_no_duplicate_annotations(self, stub: str, before: str, after: str) -> None: + self.run_simple_test_case(stub=stub, before=before, after=after) From a3f5bf97d631e79c3395a249e15645cfbc225a4d Mon Sep 17 00:00:00 2001 From: Alessandro Pietro Bardelli Date: Wed, 14 Jun 2023 15:39:03 +0200 Subject: [PATCH 385/632] Allow pyo3 >=0.17 (#957) --- native/libcst/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index e068fefa..22440666 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -30,7 +30,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.9" -pyo3 = { version = "0.17", optional = true } +pyo3 = { version = ">=0.17", optional = true } thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" From 0fb902121890f98bd73136f0057a9b7fb0561ac9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 18 Jul 2023 10:03:10 +0100 Subject: [PATCH 386/632] Don't swallow trailing whitespace (#976) --- native/libcst/src/nodes/module.rs | 8 ++------ native/libcst/tests/fixtures/trailing_whitespace.py | 5 +++++ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 native/libcst/tests/fixtures/trailing_whitespace.py diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index 
656b7d39..ff9a2a73 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -77,12 +77,8 @@ impl<'r, 'a> Inflate<'a> for DeflatedModule<'r, 'a> { } } if let Some(num) = last_indented { - if num + 1 == footer.len() { - footer = vec![]; - } else { - let (_, rest) = footer.split_at(num + 1); - footer = rest.to_vec(); - } + let (_, rest) = footer.split_at(num); + footer = rest.to_vec(); } } else { swap(&mut header, &mut footer); diff --git a/native/libcst/tests/fixtures/trailing_whitespace.py b/native/libcst/tests/fixtures/trailing_whitespace.py new file mode 100644 index 00000000..5a01c197 --- /dev/null +++ b/native/libcst/tests/fixtures/trailing_whitespace.py @@ -0,0 +1,5 @@ + + +x = 42 +print(x) + \ No newline at end of file From 9eab2f037fa3680e0627d23038457b65dbb4078f Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 25 Jul 2023 16:13:48 +0100 Subject: [PATCH 387/632] Don't insert duplicate imports (#981) --- libcst/codemod/commands/rename.py | 28 ++++++++--------- libcst/codemod/commands/tests/test_rename.py | 32 ++++++++++++++++++++ 2 files changed, 45 insertions(+), 15 deletions(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 472d1e61..4bd0ee3d 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -128,28 +128,26 @@ class RenameCommand(VisitorBasedCodemodCommand): ): # Might, be in use elsewhere in the code, so schedule a potential removal, and add another alias. new_names.append(import_alias) - self.scheduled_removals.add(original_node) - new_names.append( - cst.ImportAlias( - name=cst.Name( - value=self.gen_replacement_module(import_alias_full_name) - ) - ) - ) + replacement_module = self.gen_replacement_module(import_alias_full_name) self.bypass_import = True + if replacement_module != import_alias_name.value: + self.scheduled_removals.add(original_node) + new_names.append( + cst.ImportAlias(name=cst.Name(value=replacement_module)) + ) elif isinstance( import_alias_name, cst.Attribute ) and self.old_name.startswith(import_alias_full_name + "."): # Same idea as above. 
new_names.append(import_alias) - self.scheduled_removals.add(original_node) - new_name_node: Union[ - cst.Attribute, cst.Name - ] = self.gen_name_or_attr_node( - self.gen_replacement_module(import_alias_full_name) - ) - new_names.append(cst.ImportAlias(name=new_name_node)) + replacement_module = self.gen_replacement_module(import_alias_full_name) self.bypass_import = True + if replacement_module != import_alias_full_name: + self.scheduled_removals.add(original_node) + new_name_node: Union[ + cst.Attribute, cst.Name + ] = self.gen_name_or_attr_node(replacement_module) + new_names.append(cst.ImportAlias(name=new_name_node)) else: new_names.append(import_alias) diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 2c9daed1..2f897119 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -275,6 +275,38 @@ class TestRenameCommand(CodemodTest): new_name="a.b.module_3.Class_3", ) + def test_import_same_module(self) -> None: + before = """ + import logging + logging.warn(1) + """ + after = """ + import logging + logging.warning(1) + """ + self.assertCodemod( + before, + after, + old_name="logging.warn", + new_name="logging.warning", + ) + + def test_import_same_dotted_module(self) -> None: + before = """ + import a.b + a.b.warn(1) + """ + after = """ + import a.b + a.b.warning(1) + """ + self.assertCodemod( + before, + after, + old_name="a.b.warn", + new_name="a.b.warning", + ) + def test_rename_local_variable(self) -> None: before = """ x = 5 From 0f7766f4510c43b4d3ba90ec48800f9ddbcc4e4e Mon Sep 17 00:00:00 2001 From: Sergii Dymchenko Date: Sat, 26 Aug 2023 02:54:32 -0700 Subject: [PATCH 388/632] Don't gather dirs ending .py (#994) --- libcst/codemod/_cli.py | 5 ++++- libcst/tests/test_e2e.py | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 61e34e1c..3958ef05 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -90,7 +90,10 @@ def gather_files( ret.extend( str(p) for p in Path(fd).rglob("*.py*") - if str(p).endswith("py") or (include_stubs and str(p).endswith("pyi")) + if Path.is_file(p) + and ( + str(p).endswith("py") or (include_stubs and str(p).endswith("pyi")) + ) ) return sorted(ret) diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py index ecdc23ba..219192fb 100644 --- a/libcst/tests/test_e2e.py +++ b/libcst/tests/test_e2e.py @@ -48,6 +48,9 @@ class ToolE2ETest(TestCase): # File that should not be modified other = tmp / "other.py" other.touch() + # Just a dir named "dir.py", should be ignored + adir = tmp / "dir.py" + adir.mkdir() # Run command command_instance = PrintToPPrintCommand(CodemodContext()) From b8a644bc5814dc891dc3d544b6c90fe725ccdb10 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 26 Aug 2023 11:56:02 +0100 Subject: [PATCH 389/632] Bump pypa/cibuildwheel from 2.13.0 to 2.15.0 (#987) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.13.0 to 2.15.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.13.0...v2.15.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 60a0e16b..468079c7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.13.0 + uses: pypa/cibuildwheel@v2.15.0 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From 43e21c8d71aa3348bc932cdf1dc2f3d9fd548ec1 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 26 Aug 2023 12:23:13 +0100 Subject: [PATCH 390/632] ci: enable testing on windows + 3.11 (#998) --- .github/workflows/ci.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aad99e79..4d9ba0be 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,10 +15,6 @@ jobs: matrix: os: [macos-latest, ubuntu-latest, windows-latest] python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] - exclude: - # skip these builds until https://bugs.launchpad.net/lxml/+bug/1977998 is resolved - - os: windows-latest - python-version: "3.11" steps: - uses: actions/checkout@v3 with: From cbfd9c30a33cf80cf150a686b89f597fd5d21559 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 26 Aug 2023 12:42:12 +0100 Subject: [PATCH 391/632] drop support for Python 3.7 (#997) --- .github/workflows/ci.yml | 2 +- libcst/_add_slots.py | 17 ++++------------- pyproject.toml | 20 ++++++-------------- 3 files changed, 11 insertions(+), 28 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4d9ba0be..d062113e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v3 with: diff --git a/libcst/_add_slots.py b/libcst/_add_slots.py index bbe2c634..7012ce1a 100644 --- a/libcst/_add_slots.py +++ b/libcst/_add_slots.py @@ -38,19 +38,10 @@ def add_slots(cls: Type[_T]) -> Type[_T]: # Create the class. qualname = getattr(cls, "__qualname__", None) - try: - # GenericMeta in py3.6 requires us to track __orig_bases__. This is fixed in py3.7 - # by the removal of GenericMeta. We should just be able to use cls.__bases__ in the - # future. - bases = getattr(cls, "__orig_bases__", cls.__bases__) - # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. - # pyre-fixme[19]: Expected 0 positional arguments. - cls = type(cls)(cls.__name__, bases, cls_dict) - except TypeError: - # We're in py3.7 and should use cls.__bases__ - # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. - # pyre-fixme[19]: Expected 0 positional arguments. - cls = type(cls)(cls.__name__, cls.__bases__, cls_dict) + + # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. + # pyre-fixme[19]: Expected 0 positional arguments. 
+ cls = type(cls)(cls.__name__, cls.__bases__, cls_dict) if qualname is not None: cls.__qualname__ = qualname diff --git a/pyproject.toml b/pyproject.toml index 59ce2602..abec5ac8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,19 +6,17 @@ name = "libcst" description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." readme = "README.rst" dynamic = ["version"] -license = {file="LICENSE"} +license = { file = "LICENSE" } classifiers = [ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ] -requires-python = ">=3.7" +requires-python = ">=3.8" dependencies = [ - "dataclasses>=0.6.0; python_version < '3.7'", "typing_extensions>=3.7.4.2", "typing_inspect>=0.4.0", "pyyaml>=5.2", @@ -54,18 +52,15 @@ Github = "https://github.com/Instagram/LibCST" Changelog = "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md" [tool.black] -target-version = ["py37"] -extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format +target-version = ["py38"] +extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format [tool.hatch.envs.default] features = ["dev"] [tool.hatch.envs.default.scripts] docs = "sphinx-build -ab html docs/source docs/build" -fixtures = [ - "python scripts/regenerate-fixtures.py", - "git diff --exit-code", -] +fixtures = ["python scripts/regenerate-fixtures.py", "git diff --exit-code"] format = "ufmt format libcst scripts" lint = [ "flake8 libcst", @@ -74,10 +69,7 @@ lint = [ "python scripts/check_copyright.py", ] test = "python -m libcst.tests" -typecheck = [ - "pyre --version", - "pyre check", -] +typecheck = ["pyre --version", "pyre check"] [tool.slotscheck] exclude-modules = '^libcst\.(testing|tests)' From 125f9c321b8d386b60747cfaf3902d8d7167d1e0 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 26 Aug 2023 13:06:27 +0100 Subject: [PATCH 392/632] ci: fix rust-cache config (#999) --- .github/workflows/ci.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d062113e..1a62e32c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,7 +33,8 @@ jobs: - name: Rust Cache uses: Swatinem/rust-cache@v2.4.0 with: - working-directory: native + shared-key: "shared" + workspaces: native - name: Build LibCST run: hatch env create - name: Tests @@ -144,7 +145,8 @@ jobs: - name: Rust Cache uses: Swatinem/rust-cache@v2.4.0 with: - working-directory: native + shared-key: "shared" + workspaces: native - uses: actions/setup-python@v4 with: cache: pip From 6d11068723d0f942a6caa244ddd83df3ff74bb2b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 26 Aug 2023 13:07:12 +0100 Subject: [PATCH 393/632] Bump black from 23.3.0 to 23.7.0 (#973) Bumps [black](https://github.com/psf/black) from 23.3.0 to 23.7.0. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.3.0...23.7.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index abec5ac8..4dd449a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ dependencies = [ [project.optional-dependencies] dev = [ - "black==23.3.0", + "black==23.7.0", "coverage>=4.5.4", "build>=0.10.0", "fixit==0.1.1", From e9bad94d5868ba5a3919cc34e26665d40cdf9f91 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 26 Aug 2023 13:07:39 +0100 Subject: [PATCH 394/632] Bump fixit from 0.1.1 to 2.0.0.post1 (#979) Bumps [fixit](https://github.com/Instagram/Fixit) from 0.1.1 to 2.0.0.post1. - [Changelog](https://github.com/Instagram/Fixit/blob/main/CHANGELOG.md) - [Commits](https://github.com/Instagram/Fixit/compare/v0.1.1...v2.0.0.post1) --- updated-dependencies: - dependency-name: fixit dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4dd449a5..4718e149 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dev = [ "black==23.7.0", "coverage>=4.5.4", "build>=0.10.0", - "fixit==0.1.1", + "fixit==2.0.0.post1", "flake8>=3.7.8,<5", "Sphinx>=5.1.1", "hypothesis>=4.36.0", From c2d176162fbbd36f88988baaf57d836a3ed1e6e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 26 Aug 2023 13:29:33 +0100 Subject: [PATCH 395/632] Bump Swatinem/rust-cache from 2.4.0 to 2.6.2 (#990) Bumps [Swatinem/rust-cache](https://github.com/swatinem/rust-cache) from 2.4.0 to 2.6.2. - [Release notes](https://github.com/swatinem/rust-cache/releases) - [Changelog](https://github.com/Swatinem/rust-cache/blob/master/CHANGELOG.md) - [Commits](https://github.com/swatinem/rust-cache/compare/v2.4.0...v2.6.2) --- updated-dependencies: - dependency-name: Swatinem/rust-cache dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 468079c7..49c44772 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -50,7 +50,7 @@ jobs: python-version: "3.10" - name: Rust Cache if: ${{ !contains(matrix.os, 'self-hosted') }} - uses: Swatinem/rust-cache@v2.4.0 + uses: Swatinem/rust-cache@v2.6.2 with: working-directory: native - name: Disable scmtools local scheme diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1a62e32c..63e9a766 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: with: toolchain: stable - name: Rust Cache - uses: Swatinem/rust-cache@v2.4.0 + uses: Swatinem/rust-cache@v2.6.2 with: shared-key: "shared" workspaces: native @@ -143,7 +143,7 @@ jobs: toolchain: stable components: rustfmt, clippy - name: Rust Cache - uses: Swatinem/rust-cache@v2.4.0 + uses: Swatinem/rust-cache@v2.6.2 with: shared-key: "shared" workspaces: native From b28777e9e57451ada13bc0d4f98424796d47afb3 Mon Sep 17 00:00:00 2001 From: Tom Forbes Date: Sat, 26 Aug 2023 13:34:27 +0100 Subject: [PATCH 396/632] Remove criterion-cycles-per-byte dependency and related benchmark measurement (#995) --- .github/workflows/ci.yml | 5 + native/Cargo.lock | 170 ++-------------------- native/libcst/Cargo.toml | 1 - native/libcst/benches/parser_benchmark.rs | 3 +- 4 files changed, 15 insertions(+), 164 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 63e9a766..9945e4aa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -168,6 +168,11 @@ jobs: with: token: ${{ secrets.GITHUB_TOKEN }} args: --manifest-path=native/Cargo.toml --all-features + - name: compile-benchmarks + uses: actions-rs/cargo@v1 + with: + command: bench + args: --manifest-path=native/Cargo.toml --no-run rustfmt: name: Rustfmt diff --git a/native/Cargo.lock b/native/Cargo.lock index 92ba4b04..181ec5b0 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -46,33 +46,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" -[[package]] -name = "bstr" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", - "serde", -] - [[package]] name = "bumpalo" version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" -[[package]] -name = "cast" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a" -dependencies = [ - "rustc_version", -] - [[package]] name = "cast" version = "0.3.0" @@ -121,17 +100,6 @@ dependencies = [ "half", ] -[[package]] -name = "clap" -version = "2.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" -dependencies = [ - "bitflags", - "textwrap 0.11.0", - "unicode-width", -] - [[package]] name = "clap" version = "3.2.22" @@ -141,7 +109,7 @@ dependencies = [ 
"bitflags", "clap_lex", "indexmap", - "textwrap 0.15.1", + "textwrap", ] [[package]] @@ -153,32 +121,6 @@ dependencies = [ "os_str_bytes", ] -[[package]] -name = "criterion" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" -dependencies = [ - "atty", - "cast 0.3.0", - "clap 2.34.0", - "criterion-plot 0.4.4", - "csv", - "itertools", - "lazy_static", - "num-traits", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_cbor", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - [[package]] name = "criterion" version = "0.4.0" @@ -187,10 +129,10 @@ checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" dependencies = [ "anes", "atty", - "cast 0.3.0", + "cast", "ciborium", - "clap 3.2.22", - "criterion-plot 0.5.0", + "clap", + "criterion-plot", "itertools", "lazy_static", "num-traits", @@ -205,32 +147,13 @@ dependencies = [ "walkdir", ] -[[package]] -name = "criterion-cycles-per-byte" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d34485a578330c7a91ccf064674f3739a7aebbf3b9d7fd498a6d3e8f7473c96" -dependencies = [ - "criterion 0.3.6", -] - -[[package]] -name = "criterion-plot" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" -dependencies = [ - "cast 0.2.7", - "itertools", -] - [[package]] name = "criterion-plot" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ - "cast 0.3.0", + "cast", "itertools", ] @@ -279,28 +202,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "csv" -version = "1.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" -dependencies = [ - "bstr", - "csv-core", - "itoa 0.4.8", - "ryu", - "serde", -] - -[[package]] -name = "csv-core" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" -dependencies = [ - "memchr", -] - [[package]] name = "difference" version = "2.0.0" @@ -374,12 +275,6 @@ dependencies = [ "either", ] -[[package]] -name = "itoa" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" - [[package]] name = "itoa" version = "1.0.2" @@ -403,17 +298,16 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.126" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libcst" version = "0.1.0" dependencies = [ "chic", - "criterion 0.4.0", - "criterion-cycles-per-byte", + "criterion", "difference", "itertools", "libcst_derive", @@ -713,27 +607,12 @@ dependencies = [ "regex-syntax", ] -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" - [[package]] name = "regex-syntax" 
version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] - [[package]] name = "ryu" version = "1.0.10" @@ -755,12 +634,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" -[[package]] -name = "semver" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c" - [[package]] name = "serde" version = "1.0.145" @@ -770,16 +643,6 @@ dependencies = [ "serde_derive", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", - "serde", -] - [[package]] name = "serde_derive" version = "1.0.145" @@ -797,7 +660,7 @@ version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" dependencies = [ - "itoa 1.0.2", + "itoa", "ryu", "serde", ] @@ -834,15 +697,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - [[package]] name = "textwrap" version = "0.15.1" @@ -909,12 +763,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" -[[package]] -name = "unicode-width" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" - [[package]] name = "unindent" version = "0.1.9" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 22440666..4f0b848a 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -41,7 +41,6 @@ libcst_derive = { path = "../libcst_derive" } [dev-dependencies] criterion = { version = "0.4.0", features = ["html_reports"] } -criterion-cycles-per-byte = "0.1" difference = "2.0.0" [[bench]] diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index 53ab2e17..0b9849a4 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -11,7 +11,6 @@ use std::{ use criterion::{ black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, Criterion, }; -use criterion_cycles_per_byte::CyclesPerByte; use itertools::Itertools; use libcst_native::{ parse_module, parse_tokens_without_whitespace, tokenize, Codegen, Config, Inflate, @@ -120,7 +119,7 @@ pub fn parse_into_cst_benchmarks(c: &mut Criterion) { criterion_group!( name=benches; - config = Criterion::default().with_measurement(CyclesPerByte); + config=Criterion::default(); targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks, parse_into_cst_benchmarks ); criterion_main!(benches); From 75b6331d55fe450c81c99fb9af22df937f5438b4 Mon Sep 
17 00:00:00 2001 From: Tom Forbes Date: Sat, 26 Aug 2023 15:21:05 +0100 Subject: [PATCH 397/632] Switch to using thread_local regular expressions to avoid regex mutex contention (#996) --- native/Cargo.lock | 258 +++++++++++++----- native/libcst/Cargo.toml | 10 +- native/libcst/benches/parser_benchmark.rs | 53 +++- native/libcst/src/parser/numbers.rs | 50 ++-- native/libcst/src/tokenizer/core/mod.rs | 61 +++-- native/libcst/src/tokenizer/operators.rs | 7 +- .../libcst/src/tokenizer/text_position/mod.rs | 7 +- .../libcst/src/tokenizer/whitespace_parser.rs | 37 ++- 8 files changed, 326 insertions(+), 157 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 181ec5b0..06a718fd 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "aho-corasick" -version = "0.7.18" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" dependencies = [ "memchr", ] @@ -24,15 +24,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7" [[package]] -name = "atty" -version = "0.2.14" +name = "anstyle" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi", - "libc", - "winapi", -] +checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" [[package]] name = "autocfg" @@ -46,6 +41,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" + [[package]] name = "bumpalo" version = "3.12.0" @@ -58,6 +59,15 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -102,40 +112,44 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.22" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86447ad904c7fb335a790c9d7fe3d0d971dc523b8ccd1561a520de9a85302750" +checksum = "1d5f1946157a96594eb2d2c10eb7ad9a2b27518cb3000209dec700c35df9197d" dependencies = [ - "bitflags", + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78116e32a042dd73c2901f0dc30790d20ff3447f3e3472fad359e8c3d282bcd6" +dependencies = [ + "anstyle", "clap_lex", - "indexmap", - "textwrap", ] [[package]] name = "clap_lex" -version = "0.2.4" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", -] +checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" [[package]] name = 
"criterion" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" dependencies = [ "anes", - "atty", "cast", "ciborium", "clap", "criterion-plot", - "itertools", - "lazy_static", + "is-terminal", + "itertools 0.10.5", "num-traits", + "once_cell", "oorandom", "plotters", "rayon", @@ -154,7 +168,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] @@ -214,6 +228,27 @@ version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +[[package]] +name = "errno" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "glob" version = "0.3.0" @@ -226,12 +261,6 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - [[package]] name = "hermit-abi" version = "0.1.19" @@ -242,14 +271,10 @@ dependencies = [ ] [[package]] -name = "indexmap" -version = "1.9.1" +name = "hermit-abi" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" -dependencies = [ - "autocfg", - "hashbrown", -] +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "indoc" @@ -266,6 +291,17 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi 0.3.2", + "rustix", + "windows-sys", +] + [[package]] name = "itertools" version = "0.10.5" @@ -275,6 +311,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.2" @@ -309,12 +354,12 @@ dependencies = [ "chic", "criterion", "difference", - "itertools", + "itertools 0.11.0", "libcst_derive", - "once_cell", "paste", "peg", "pyo3", + "rayon", "regex", "thiserror", ] @@ -328,6 +373,12 @@ dependencies = [ "trybuild", ] +[[package]] +name = "linux-raw-sys" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" + [[package]] name = "lock_api" version = "0.4.7" @@ -377,7 +428,7 @@ 
version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" dependencies = [ - "hermit-abi", + "hermit-abi 0.1.19", "libc", ] @@ -393,12 +444,6 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" -[[package]] -name = "os_str_bytes" -version = "6.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" - [[package]] name = "parking_lot" version = "0.11.2" @@ -565,21 +610,19 @@ dependencies = [ [[package]] name = "rayon" -version = "1.5.3" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" dependencies = [ - "autocfg", - "crossbeam-deque", "either", "rayon-core", ] [[package]] name = "rayon-core" -version = "1.9.3" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -593,14 +636,26 @@ version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "regex" -version = "1.7.0" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", @@ -609,9 +664,22 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.27" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" + +[[package]] +name = "rustix" +version = "0.38.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bfe0f2582b4931a45d1fa608f8a8722e8b3c7ac54dd6d5f3b3212791fedef49" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] [[package]] name = "ryu" @@ -697,12 +765,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "textwrap" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "949517c0cf1bf4ee812e2e07e08ab448e3ae0d23472aee8a06c985f0c8815b16" - [[package]] name = "thiserror" version = "1.0.37" @@ -874,3 +936,69 @@ name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 4f0b848a..6c842a39 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -8,7 +8,7 @@ name = "libcst" version = "0.1.0" authors = ["LibCST Developers"] edition = "2018" -rust-version = "1.53" +rust-version = "1.70" [lib] name = "libcst_native" @@ -34,14 +34,14 @@ pyo3 = { version = ">=0.17", optional = true } thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" -itertools = "0.10.5" -once_cell = "1.16.0" -regex = "1.7.0" +regex = "1.9.3" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] -criterion = { version = "0.4.0", features = ["html_reports"] } +criterion = { version = "0.5.1", features = ["html_reports"] } difference = "2.0.0" +rayon = "1.7.0" +itertools = "0.11.0" [[bench]] name = "parser_benchmark" diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index 0b9849a4..4987022a 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -9,9 +9,12 @@ use std::{ }; use criterion::{ - black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, Criterion, + black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, BenchmarkId, + Criterion, Throughput, }; use itertools::Itertools; +use rayon::prelude::*; + use libcst_native::{ parse_module, parse_tokens_without_whitespace, tokenize, Codegen, Config, Inflate, }; @@ -21,7 +24,7 @@ const NEWLINE: 
&str = "\n"; #[cfg(windows)] const NEWLINE: &str = "\r\n"; -fn load_all_fixtures() -> String { +fn load_all_fixtures_vec() -> Vec { let mut path = PathBuf::from(file!()); path.pop(); path.pop(); @@ -42,7 +45,11 @@ fn load_all_fixtures() -> String { let path = file.unwrap().path(); std::fs::read_to_string(&path).expect("reading_file") }) - .join(NEWLINE) + .collect() +} + +fn load_all_fixtures() -> String { + load_all_fixtures_vec().join(NEWLINE) } pub fn inflate_benchmarks(c: &mut Criterion) { @@ -117,9 +124,47 @@ pub fn parse_into_cst_benchmarks(c: &mut Criterion) { group.finish(); } +pub fn parse_into_cst_multithreaded_benchmarks( + c: &mut Criterion, +) where + ::Value: Send, +{ + let fixtures = load_all_fixtures_vec(); + let mut group = c.benchmark_group("parse_into_cst_parallel"); + group.measurement_time(Duration::from_secs(15)); + group.warm_up_time(Duration::from_secs(5)); + + for thread_count in 1..10 { + let expanded_fixtures = (0..thread_count) + .flat_map(|_| fixtures.clone()) + .collect_vec(); + group.throughput(Throughput::Elements(expanded_fixtures.len() as u64)); + group.bench_with_input( + BenchmarkId::from_parameter(thread_count), + &thread_count, + |b, thread_count| { + let thread_pool = rayon::ThreadPoolBuilder::new() + .num_threads(*thread_count) + .build() + .unwrap(); + thread_pool.install(|| { + b.iter_with_large_drop(|| { + expanded_fixtures + .par_iter() + .map(|contents| black_box(parse_module(&contents, None))) + .collect::>() + }); + }); + }, + ); + } + + group.finish(); +} + criterion_group!( name=benches; config=Criterion::default(); - targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks, parse_into_cst_benchmarks + targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks, parse_into_cst_benchmarks, parse_into_cst_multithreaded_benchmarks ); criterion_main!(benches); diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs index 6d7a0d8e..95db532b 100644 --- a/native/libcst/src/parser/numbers.rs +++ b/native/libcst/src/parser/numbers.rs @@ -3,7 +3,6 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree -use once_cell::sync::Lazy; use regex::Regex; use crate::nodes::deflated::{Expression, Float, Imaginary, Integer}; @@ -13,51 +12,48 @@ static BIN: &str = r"0[bB](?:_?[01])+"; static OCT: &str = r"0[oO](?:_?[0-7])+"; static DECIMAL: &str = r"(?:0(?:_?0)*|[1-9](?:_?[0-9])*)"; -static INTEGER_RE: Lazy = Lazy::new(|| { - Regex::new(format!("^({}|{}|{}|{})$", HEX, BIN, OCT, DECIMAL).as_str()).expect("regex") -}); - static EXPONENT: &str = r"[eE][-+]?[0-9](?:_?[0-9])*"; // Note: these don't exactly match the python implementation (exponent is not included) static POINT_FLOAT: &str = r"([0-9](?:_?[0-9])*\.(?:[0-9](?:_?[0-9])*)?|\.[0-9](?:_?[0-9])*)"; static EXP_FLOAT: &str = r"[0-9](?:_?[0-9])*"; -static FLOAT_RE: Lazy = Lazy::new(|| { - Regex::new( - format!( - "^({}({})?|{}{})$", - POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT +thread_local! 
{ + static INTEGER_RE: Regex = + Regex::new(format!("^({}|{}|{}|{})$", HEX, BIN, OCT, DECIMAL).as_str()).expect("regex"); + static FLOAT_RE: Regex = + Regex::new( + format!( + "^({}({})?|{}{})$", + POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT + ) + .as_str(), ) - .as_str(), - ) - .expect("regex") -}); - -static IMAGINARY_RE: Lazy = Lazy::new(|| { - Regex::new( - format!( - r"^([0-9](?:_?[0-9])*[jJ]|({}({})?|{}{})[jJ])$", - POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT + .expect("regex"); + static IMAGINARY_RE: Regex = + Regex::new( + format!( + r"^([0-9](?:_?[0-9])*[jJ]|({}({})?|{}{})[jJ])$", + POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT + ) + .as_str(), ) - .as_str(), - ) - .expect("regex") -}); + .expect("regex"); +} pub(crate) fn parse_number(raw: &str) -> Expression { - if INTEGER_RE.is_match(raw) { + if INTEGER_RE.with(|r| r.is_match(raw)) { Expression::Integer(Box::new(Integer { value: raw, lpar: Default::default(), rpar: Default::default(), })) - } else if FLOAT_RE.is_match(raw) { + } else if FLOAT_RE.with(|r| r.is_match(raw)) { Expression::Float(Box::new(Float { value: raw, lpar: Default::default(), rpar: Default::default(), })) - } else if IMAGINARY_RE.is_match(raw) { + } else if IMAGINARY_RE.with(|r| r.is_match(raw)) { Expression::Imaginary(Box::new(Imaginary { value: raw, lpar: Default::default(), diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 359451a3..2365eaa3 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -58,7 +58,6 @@ /// [RustPython's parser]: https://crates.io/crates/rustpython-parser mod string_types; -use once_cell::sync::Lazy; use regex::Regex; use std::cell::RefCell; use std::cmp::Ordering; @@ -83,25 +82,27 @@ const MAX_INDENT: usize = 100; // https://github.com/rust-lang/rust/issues/71763 const MAX_CHAR: char = '\u{10ffff}'; -static SPACE_TAB_FORMFEED_RE: Lazy = Lazy::new(|| Regex::new(r"\A[ \f\t]+").expect("regex")); -static ANY_NON_NEWLINE_RE: Lazy = Lazy::new(|| Regex::new(r"\A[^\r\n]+").expect("regex")); -static STRING_PREFIX_RE: Lazy = - Lazy::new(|| Regex::new(r"\A(?i)(u|[bf]r|r[bf]|r|b|f)").expect("regex")); -static POTENTIAL_IDENTIFIER_TAIL_RE: Lazy = - Lazy::new(|| Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex")); -static DECIMAL_DOT_DIGIT_RE: Lazy = Lazy::new(|| Regex::new(r"\A\.[0-9]").expect("regex")); -static DECIMAL_TAIL_RE: Lazy = - Lazy::new(|| Regex::new(r"\A[0-9](_?[0-9])*").expect("regex")); -static HEXADECIMAL_TAIL_RE: Lazy = - Lazy::new(|| Regex::new(r"\A(_?[0-9a-fA-F])+").expect("regex")); -static OCTAL_TAIL_RE: Lazy = Lazy::new(|| Regex::new(r"\A(_?[0-7])+").expect("regex")); -static BINARY_TAIL_RE: Lazy = Lazy::new(|| Regex::new(r"\A(_?[01])+").expect("regex")); +thread_local! 
{ + static SPACE_TAB_FORMFEED_RE: Regex = Regex::new(r"\A[ \f\t]+").expect("regex"); + static ANY_NON_NEWLINE_RE: Regex = Regex::new(r"\A[^\r\n]+").expect("regex"); + static STRING_PREFIX_RE: Regex = + Regex::new(r"\A(?i)(u|[bf]r|r[bf]|r|b|f)").expect("regex"); + static POTENTIAL_IDENTIFIER_TAIL_RE: Regex = + Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex"); + static DECIMAL_DOT_DIGIT_RE: Regex = Regex::new(r"\A\.[0-9]").expect("regex"); + static DECIMAL_TAIL_RE: Regex = + Regex::new(r"\A[0-9](_?[0-9])*").expect("regex"); + static HEXADECIMAL_TAIL_RE: Regex = + Regex::new(r"\A(_?[0-9a-fA-F])+").expect("regex"); + static OCTAL_TAIL_RE: Regex = Regex::new(r"\A(_?[0-7])+").expect("regex"); + static BINARY_TAIL_RE: Regex = Regex::new(r"\A(_?[01])+").expect("regex"); -/// Used to verify identifiers when there's a non-ascii character in them. -// This changes across unicode revisions. We'd need to ship our own unicode tables to 100% match a -// given Python version's behavior. -static UNICODE_IDENTIFIER_RE: Lazy = - Lazy::new(|| Regex::new(r"\A[\p{XID_Start}_]\p{XID_Continue}*\z").expect("regex")); + /// Used to verify identifiers when there's a non-ascii character in them. + // This changes across unicode revisions. We'd need to ship our own unicode tables to 100% match a + // given Python version's behavior. + static UNICODE_IDENTIFIER_RE: Regex = + Regex::new(r"\A[\p{XID_Start}_]\p{XID_Continue}*\z").expect("regex"); +} #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub enum TokType { @@ -316,11 +317,11 @@ impl<'t> TokState<'t> { 'again: loop { // Skip spaces - self.text_pos.consume(&*SPACE_TAB_FORMFEED_RE); + SPACE_TAB_FORMFEED_RE.with(|v| self.text_pos.consume(v)); // Skip comment, unless it's a type comment if self.text_pos.peek() == Some('#') { - self.text_pos.consume(&*ANY_NON_NEWLINE_RE); + ANY_NON_NEWLINE_RE.with(|v| self.text_pos.consume(v)); // type_comment is not supported } @@ -384,7 +385,7 @@ impl<'t> TokState<'t> { } // Number starting with period - Some('.') if self.text_pos.matches(&*DECIMAL_DOT_DIGIT_RE) => { + Some('.') if DECIMAL_DOT_DIGIT_RE.with(|r| self.text_pos.matches(r)) => { self.consume_number(NumberState::Fraction) } @@ -472,7 +473,7 @@ impl<'t> TokState<'t> { } // Operator - Some(_) if self.text_pos.consume(&*OPERATOR_RE) => Ok(TokType::Op), + Some(_) if OPERATOR_RE.with(|r| self.text_pos.consume(r)) => Ok(TokType::Op), // Bad character // If nothing works, fall back to this error. CPython returns an OP in this case, @@ -623,7 +624,7 @@ impl<'t> TokState<'t> { fn consume_identifier_or_prefixed_string(&mut self) -> Result> { // Process the various legal combinations of b"", r"", u"", and f"". - if self.text_pos.consume(&*STRING_PREFIX_RE) { + if STRING_PREFIX_RE.with(|r| self.text_pos.consume(r)) { if let Some('"') | Some('\'') = self.text_pos.peek() { // We found a string, not an identifier. Bail! 
if self.split_fstring @@ -645,7 +646,7 @@ impl<'t> TokState<'t> { Some('a'..='z') | Some('A'..='Z') | Some('_') | Some('\u{80}'..=MAX_CHAR) )); } - self.text_pos.consume(&*POTENTIAL_IDENTIFIER_TAIL_RE); + POTENTIAL_IDENTIFIER_TAIL_RE.with(|r| self.text_pos.consume(r)); let identifier_str = self.text_pos.slice_from_start_pos(&self.start_pos); if !verify_identifier(identifier_str) { // TODO: async/await @@ -691,7 +692,7 @@ impl<'t> TokState<'t> { match self.text_pos.peek() { Some('x') | Some('X') => { self.text_pos.next(); - if !self.text_pos.consume(&*HEXADECIMAL_TAIL_RE) + if !HEXADECIMAL_TAIL_RE.with(|r| self.text_pos.consume(r)) || self.text_pos.peek() == Some('_') { Err(TokError::BadHexadecimal) @@ -701,7 +702,7 @@ impl<'t> TokState<'t> { } Some('o') | Some('O') => { self.text_pos.next(); - if !self.text_pos.consume(&*OCTAL_TAIL_RE) + if !OCTAL_TAIL_RE.with(|r| self.text_pos.consume(r)) || self.text_pos.peek() == Some('_') { return Err(TokError::BadOctal); @@ -715,7 +716,7 @@ impl<'t> TokState<'t> { } Some('b') | Some('B') => { self.text_pos.next(); - if !self.text_pos.consume(&*BINARY_TAIL_RE) + if !BINARY_TAIL_RE.with(|r| self.text_pos.consume(r)) || self.text_pos.peek() == Some('_') { return Err(TokError::BadBinary); @@ -819,7 +820,7 @@ impl<'t> TokState<'t> { /// Processes a decimal tail. This is the bit after the dot or after an E in a float. fn consume_decimal_tail(&mut self) -> Result<(), TokError<'t>> { - let result = self.text_pos.consume(&*DECIMAL_TAIL_RE); + let result = DECIMAL_TAIL_RE.with(|r| self.text_pos.consume(r)); // Assumption: If we've been called, the first character is an integer, so we must have a // regex match debug_assert!(result, "try_decimal_tail was called on a non-digit char"); @@ -1058,7 +1059,7 @@ fn verify_identifier(name: &str) -> bool { // TODO: If `name` is non-ascii, must first normalize name to NFKC. // Common case: If the entire string is ascii, we can avoid the more expensive regex check, // since the tokenizer already validates ascii characters before calling us. - name.is_ascii() || UNICODE_IDENTIFIER_RE.is_match(name) + name.is_ascii() || UNICODE_IDENTIFIER_RE.with(|r| r.is_match(name)) } #[derive(Clone)] diff --git a/native/libcst/src/tokenizer/operators.rs b/native/libcst/src/tokenizer/operators.rs index e5ef1526..3252f774 100644 --- a/native/libcst/src/tokenizer/operators.rs +++ b/native/libcst/src/tokenizer/operators.rs @@ -8,7 +8,6 @@ // code or that we retain the original work's copyright information. // https://docs.python.org/3/license.html#zero-clause-bsd-license-for-code-in-the-python-release-documentation -use once_cell::sync::Lazy; use regex::Regex; /// A list of strings that make up all the possible operators in a specific version of Python. @@ -69,7 +68,8 @@ pub const OPERATORS: &[&str] = &[ "<>", ]; -pub static OPERATOR_RE: Lazy = Lazy::new(|| { +thread_local! 
{ +pub static OPERATOR_RE: Regex = { // sort operators so that we try to match the longest ones first let mut sorted_operators: Box<[&str]> = OPERATORS.into(); sorted_operators.sort_unstable_by_key(|op| usize::MAX - op.len()); @@ -82,4 +82,5 @@ pub static OPERATOR_RE: Lazy = Lazy::new(|| { .join("|") )) .expect("regex") -}); +}; +} diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs index 2e58600a..fece9e3d 100644 --- a/native/libcst/src/tokenizer/text_position/mod.rs +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -5,14 +5,15 @@ mod char_width; -use once_cell::sync::Lazy; use regex::Regex; use std::fmt; use crate::tokenizer::debug_utils::EllipsisDebug; use char_width::NewlineNormalizedCharWidths; -static CR_OR_LF_RE: Lazy = Lazy::new(|| Regex::new(r"[\r\n]").expect("regex")); +thread_local! { + static CR_OR_LF_RE: Regex = Regex::new(r"[\r\n]").expect("regex"); +} pub trait TextPattern { fn match_len(&self, text: &str) -> Option; @@ -98,7 +99,7 @@ impl<'t> TextPosition<'t> { match match_len { Some(match_len) => { assert!( - !CR_OR_LF_RE.is_match(&rest_of_text[..match_len]), + !CR_OR_LF_RE.with(|r| r.is_match(&rest_of_text[..match_len])), "matches pattern must not match a newline", ); true diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs index f09ce789..be5b7752 100644 --- a/native/libcst/src/tokenizer/whitespace_parser.rs +++ b/native/libcst/src/tokenizer/whitespace_parser.rs @@ -7,7 +7,6 @@ use crate::nodes::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, }; -use once_cell::sync::Lazy; use regex::Regex; use thiserror::Error; @@ -15,10 +14,12 @@ use crate::Token; use super::TokType; -static SIMPLE_WHITESPACE_RE: Lazy = - Lazy::new(|| Regex::new(r"\A([ \f\t]|\\(\r\n?|\n))*").expect("regex")); -static NEWLINE_RE: Lazy = Lazy::new(|| Regex::new(r"\A(\r\n?|\n)").expect("regex")); -static COMMENT_RE: Lazy = Lazy::new(|| Regex::new(r"\A#[^\r\n]*").expect("regex")); +thread_local! { + static SIMPLE_WHITESPACE_RE: Regex = Regex::new(r"\A([ \f\t]|\\(\r\n?|\n))*").expect("regex"); +static NEWLINE_RE: Regex = Regex::new(r"\A(\r\n?|\n)").expect("regex"); +static COMMENT_RE: Regex = Regex::new(r"\A#[^\r\n]*").expect("regex"); +static NEWLINE_RE_2: Regex = Regex::new(r"\r\n?|\n").expect("regex"); +} #[allow(clippy::upper_case_acronyms, clippy::enum_variant_names)] #[derive(Error, Debug, PartialEq, Eq)] @@ -73,11 +74,8 @@ impl<'a> Config<'a> { break; } } - let default_newline = Regex::new(r"\r\n?|\n") - .expect("regex") - .find(input) - .map(|m| m.as_str()) - .unwrap_or("\n"); + let default_newline = + NEWLINE_RE_2.with(|r| r.find(input).map(|m| m.as_str()).unwrap_or("\n")); Self { input, @@ -200,9 +198,8 @@ pub fn parse_empty_lines<'a>( } pub fn parse_comment<'a>(config: &Config<'a>, state: &mut State) -> Result>> { - if let Some(comment_match) = - COMMENT_RE.find(config.get_line_after_column(state.line, state.column_byte)?) 
- { + let newline_after = config.get_line_after_column(state.line, state.column_byte)?; + if let Some(comment_match) = COMMENT_RE.with(|r| r.find(newline_after)) { let comment_str = comment_match.as_str(); advance_this_line( config, @@ -216,9 +213,8 @@ pub fn parse_comment<'a>(config: &Config<'a>, state: &mut State) -> Result(config: &Config<'a>, state: &mut State) -> Result>> { - if let Some(newline_match) = - NEWLINE_RE.find(config.get_line_after_column(state.line, state.column_byte)?) - { + let newline_after = config.get_line_after_column(state.line, state.column_byte)?; + if let Some(newline_match) = NEWLINE_RE.with(|r| r.find(newline_after)) { let newline_str = newline_match.as_str(); advance_this_line( config, @@ -350,10 +346,11 @@ pub fn parse_simple_whitespace<'a>( let capture_ws = |line, col| -> Result<&'a str> { let x = config.get_line_after_column(line, col); let x = x?; - Ok(SIMPLE_WHITESPACE_RE - .find(x) - .expect("SIMPLE_WHITESPACE_RE supports 0-length matches, so it must always match") - .as_str()) + Ok(SIMPLE_WHITESPACE_RE.with(|r| { + r.find(x) + .expect("SIMPLE_WHITESPACE_RE supports 0-length matches, so it must always match") + .as_str() + })) }; let start_offset = state.byte_offset; let mut prev_line: &str; From 2064e200afd62e5e011237d0d82116d60ca12d24 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 27 Aug 2023 09:31:27 +0100 Subject: [PATCH 398/632] Fix readme's Python version requirement --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 4ad4cd67..baba8715 100644 --- a/README.rst +++ b/README.rst @@ -125,7 +125,7 @@ For a more detailed usage example, `see our documentation Installation ------------ -LibCST requires Python 3.7+ and can be easily installed using most common Python +LibCST requires Python 3.8+ and can be easily installed using most common Python packaging tools. We recommend installing the latest stable release from `PyPI `_ with pip: From 3bb5ba5a861fb2c706e0ef0f177a75f35a7e03fd Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 27 Aug 2023 16:27:38 +0100 Subject: [PATCH 399/632] ci: test with 3.12 (#1003) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9945e4aa..f928d012 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12-dev"] steps: - uses: actions/checkout@v3 with: From 7c09b5d046824309ccb7cc2dec3899f0016f2a3a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 27 Aug 2023 16:29:20 +0100 Subject: [PATCH 400/632] Remove need for regex in TextPosition::matches (#1002) --- native/libcst/src/tokenizer/text_position/mod.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs index fece9e3d..42a7b682 100644 --- a/native/libcst/src/tokenizer/text_position/mod.rs +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -11,10 +11,6 @@ use std::fmt; use crate::tokenizer::debug_utils::EllipsisDebug; use char_width::NewlineNormalizedCharWidths; -thread_local! 
{ - static CR_OR_LF_RE: Regex = Regex::new(r"[\r\n]").expect("regex"); -} - pub trait TextPattern { fn match_len(&self, text: &str) -> Option; } @@ -99,7 +95,7 @@ impl<'t> TextPosition<'t> { match match_len { Some(match_len) => { assert!( - !CR_OR_LF_RE.with(|r| r.is_match(&rest_of_text[..match_len])), + !rest_of_text[..match_len].contains(|x| x == '\r' || x == '\n'), "matches pattern must not match a newline", ); true From 9286446f889f1778b8f11451a68107052b2930b3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 28 Aug 2023 22:07:22 +0100 Subject: [PATCH 401/632] PEP 695 - Type Parameter Syntax (#1004) This PR adds support for parsing and representing Type Parameters and Type Aliases as specified by PEP 695. What's missing are the scope rules, to be implemented in a future PR. Notable (user visible) changes: - new `TypeAlias` CST node, which is a `SmallStatement` - new CST nodes to represent TypeVarLikes: `TypeVar`, `TypeVarTuple`, `ParamSpec` - new helper CST nodes: `TypeParameters` to serve as a container for multiple TypeVarLikes, and `TypeParam` which is a single item in a `TypeParameters` (owning the separating comma) - extended `FunctionDef` and `ClassDef` with an optional `type_parameters` field, as well as `whitespace_after_type_parameters` to own the extra whitespace between type parameters and the following token - these new fields are added after all others to avoid breaking callers passing in fields as positional arguments - in `FunctionDef` and `ClassDef`, `whitespace_after_name` now owns the whitespace before the type parameters if they exist --- libcst/__init__.py | 12 + libcst/_nodes/statement.py | 346 +++++++++++++++++- libcst/_nodes/tests/test_classdef.py | 102 ++++++ libcst/_nodes/tests/test_funcdef.py | 79 +++- libcst/_nodes/tests/test_type_alias.py | 133 +++++++ libcst/_typed_visitor.py | 292 +++++++++++++++ libcst/matchers/__init__.py | 334 +++++++++++++++++ libcst/matchers/_return_types.py | 12 + libcst/tests/test_tool.py | 10 + native/libcst/src/nodes/mod.rs | 10 +- native/libcst/src/nodes/statement.rs | 321 +++++++++++++++- native/libcst/src/parser/grammar.rs | 116 +++++- .../libcst/tests/fixtures/type_parameters.py | 57 +++ 13 files changed, 1792 insertions(+), 32 deletions(-) create mode 100644 libcst/_nodes/tests/test_type_alias.py create mode 100644 native/libcst/tests/fixtures/type_parameters.py diff --git a/libcst/__init__.py b/libcst/__init__.py index ff63033d..2a8e47b3 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -183,6 +183,7 @@ from libcst._nodes.statement import ( MatchValue, NameItem, Nonlocal, + ParamSpec, Pass, Raise, Return, @@ -190,6 +191,11 @@ from libcst._nodes.statement import ( SimpleStatementSuite, Try, TryStar, + TypeAlias, + TypeParam, + TypeParameters, + TypeVar, + TypeVarTuple, While, With, WithItem, @@ -438,4 +444,10 @@ __all__ = [ "VisitorMetadataProvider", "MetadataDependent", "MetadataWrapper", + "TypeVar", + "TypeVarTuple", + "ParamSpec", + "TypeParam", + "TypeParameters", + "TypeAlias", ] diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index de5161fa..a9502da8 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -48,6 +48,7 @@ from libcst._nodes.op import ( AssignEqual, BaseAugOp, BitOr, + Colon, Comma, Dot, ImportStar, @@ -1738,8 +1739,8 @@ class FunctionDef(BaseCompoundStatement): #: Whitespace after the ``def`` keyword and before the function name. 
whitespace_after_def: SimpleWhitespace = SimpleWhitespace.field(" ") - #: Whitespace after the function name and before the opening parenthesis for - #: the parameters. + #: Whitespace after the function name and before the type parameters or the opening + #: parenthesis for the parameters. whitespace_after_name: SimpleWhitespace = SimpleWhitespace.field("") #: Whitespace after the opening parenthesis for the parameters but before @@ -1750,6 +1751,13 @@ class FunctionDef(BaseCompoundStatement): #: the colon. whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + #: An optional declaration of type parameters. + type_parameters: Optional["TypeParameters"] = None + + #: Whitespace between the type parameters and the opening parenthesis for the + #: (non-type) parameters. + whitespace_after_type_parameters: SimpleWhitespace = SimpleWhitespace.field("") + def _validate(self) -> None: if len(self.name.lpar) > 0 or len(self.name.rpar) > 0: raise CSTValidationError("Cannot have parens around Name in a FunctionDef.") @@ -1758,6 +1766,15 @@ class FunctionDef(BaseCompoundStatement): "There must be at least one space between 'def' and name." ) + if ( + self.type_parameters is None + and not self.whitespace_after_type_parameters.empty + ): + raise CSTValidationError( + "whitespace_after_type_parameters must be empty if there are no type " + "parameters in FunctionDef" + ) + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "FunctionDef": return FunctionDef( leading_lines=visit_sequence( @@ -1777,6 +1794,15 @@ class FunctionDef(BaseCompoundStatement): whitespace_after_name=visit_required( self, "whitespace_after_name", self.whitespace_after_name, visitor ), + type_parameters=visit_optional( + self, "type_parameters", self.type_parameters, visitor + ), + whitespace_after_type_parameters=visit_required( + self, + "whitespace_after_type_parameters", + self.whitespace_after_type_parameters, + visitor, + ), whitespace_before_params=visit_required( self, "whitespace_before_params", self.whitespace_before_params, visitor ), @@ -1805,6 +1831,10 @@ class FunctionDef(BaseCompoundStatement): self.whitespace_after_def._codegen(state) self.name._codegen(state) self.whitespace_after_name._codegen(state) + type_params = self.type_parameters + if type_params is not None: + type_params._codegen(state) + self.whitespace_after_type_parameters._codegen(state) state.add_token("(") self.whitespace_before_params._codegen(state) self.params._codegen(state) @@ -1866,19 +1896,34 @@ class ClassDef(BaseCompoundStatement): #: Whitespace after the ``class`` keyword and before the class name. whitespace_after_class: SimpleWhitespace = SimpleWhitespace.field(" ") - #: Whitespace after the class name and before the opening parenthesis for - #: the bases and keywords. + #: Whitespace after the class name and before the type parameters or the opening + #: parenthesis for the bases and keywords. whitespace_after_name: SimpleWhitespace = SimpleWhitespace.field("") #: Whitespace after the closing parenthesis or class name and before #: the colon. whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") + #: An optional declaration of type parameters. + type_parameters: Optional["TypeParameters"] = None + + #: Whitespace between type parameters and opening parenthesis for the bases and + #: keywords. 
+ whitespace_after_type_parameters: SimpleWhitespace = SimpleWhitespace.field("") + def _validate_whitespace(self) -> None: if self.whitespace_after_class.empty: raise CSTValidationError( "There must be at least one space between 'class' and name." ) + if ( + self.type_parameters is None + and not self.whitespace_after_type_parameters.empty + ): + raise CSTValidationError( + "whitespace_after_type_parameters must be empty if there are no type" + "parameters in a ClassDef" + ) def _validate_parens(self) -> None: if len(self.name.lpar) > 0 or len(self.name.rpar) > 0: @@ -1921,6 +1966,15 @@ class ClassDef(BaseCompoundStatement): whitespace_after_name=visit_required( self, "whitespace_after_name", self.whitespace_after_name, visitor ), + type_parameters=visit_optional( + self, "type_parameters", self.type_parameters, visitor + ), + whitespace_after_type_parameters=visit_required( + self, + "whitespace_after_type_parameters", + self.whitespace_after_type_parameters, + visitor, + ), lpar=visit_sentinel(self, "lpar", self.lpar, visitor), bases=visit_sequence(self, "bases", self.bases, visitor), keywords=visit_sequence(self, "keywords", self.keywords, visitor), @@ -1945,6 +1999,10 @@ class ClassDef(BaseCompoundStatement): self.whitespace_after_class._codegen(state) self.name._codegen(state) self.whitespace_after_name._codegen(state) + type_params = self.type_parameters + if type_params is not None: + type_params._codegen(state) + self.whitespace_after_type_parameters._codegen(state) lpar = self.lpar if isinstance(lpar, MaybeSentinel): if self.bases or self.keywords: @@ -3476,3 +3534,283 @@ class MatchOr(MatchPattern): pats = self.patterns for idx, pat in enumerate(pats): pat._codegen(state, default_separator=idx + 1 < len(pats)) + + +@add_slots +@dataclass(frozen=True) +class TypeVar(CSTNode): + """ + A simple (non-variadic) type variable. + + Note: this node represents type a variable when declared using PEP-695 syntax. + """ + + #: The name of the type variable. + name: Name + + #: An optional bound on the type. + bound: Optional[BaseExpression] = None + + #: The colon used to separate the name and bound. If not specified, + #: :class:`MaybeSentinel` will be replaced with a colon if there is a bound, + #: otherwise will be left empty. + colon: Union[Colon, MaybeSentinel] = MaybeSentinel.DEFAULT + + def _codegen_impl(self, state: CodegenState) -> None: + with state.record_syntactic_position(self): + self.name._codegen(state) + bound = self.bound + colon = self.colon + if not isinstance(colon, MaybeSentinel): + colon._codegen(state) + else: + if bound is not None: + state.add_token(": ") + + if bound is not None: + bound._codegen(state) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeVar": + return TypeVar( + name=visit_required(self, "name", self.name, visitor), + colon=visit_sentinel(self, "colon", self.colon, visitor), + bound=visit_optional(self, "bound", self.bound, visitor), + ) + + +@add_slots +@dataclass(frozen=True) +class TypeVarTuple(CSTNode): + """ + A variadic type variable. + """ + + #: The name of this type variable. + name: Name + + #: The (optional) whitespace between the star declaring this type variable as + #: variadic, and the variable's name. 
+ whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field("") + + def _codegen_impl(self, state: CodegenState) -> None: + with state.record_syntactic_position(self): + state.add_token("*") + self.whitespace_after_star._codegen(state) + self.name._codegen(state) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeVarTuple": + return TypeVarTuple( + name=visit_required(self, "name", self.name, visitor), + whitespace_after_star=visit_required( + self, "whitespace_after_star", self.whitespace_after_star, visitor + ), + ) + + +@add_slots +@dataclass(frozen=True) +class ParamSpec(CSTNode): + """ + A parameter specification. + + Note: this node represents a parameter specification when declared using PEP-695 + syntax. + """ + + #: The name of this parameter specification. + name: Name + + #: The (optional) whitespace between the double star declaring this type variable as + #: a parameter specification, and the name. + whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field("") + + def _codegen_impl(self, state: CodegenState) -> None: + with state.record_syntactic_position(self): + state.add_token("**") + self.whitespace_after_star._codegen(state) + self.name._codegen(state) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ParamSpec": + return ParamSpec( + name=visit_required(self, "name", self.name, visitor), + whitespace_after_star=visit_required( + self, "whitespace_after_star", self.whitespace_after_star, visitor + ), + ) + + +@add_slots +@dataclass(frozen=True) +class TypeParam(CSTNode): + """ + A single type parameter that is contained in a :class:`TypeParameters` list. + """ + + #: The actual parameter. + param: Union[TypeVar, TypeVarTuple, ParamSpec] + + #: A trailing comma. If one is not provided, :class:`MaybeSentinel` will be replaced + #: with a comma only if a comma is required. + comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + + def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: + self.param._codegen(state) + comma = self.comma + if isinstance(comma, MaybeSentinel): + if default_comma: + state.add_token(", ") + else: + comma._codegen(state) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeParam": + return TypeParam( + param=visit_required(self, "param", self.param, visitor), + comma=visit_sentinel(self, "comma", self.comma, visitor), + ) + + +@add_slots +@dataclass(frozen=True) +class TypeParameters(CSTNode): + """ + Type parameters when specified with PEP-695 syntax. + + This node captures all specified parameters that are enclosed with square brackets. + """ + + #: The parameters within the square brackets. + params: Sequence[TypeParam] = () + + #: Opening square bracket that marks the start of these parameters. + lbracket: LeftSquareBracket = LeftSquareBracket.field() + #: Closing square bracket that marks the end of these parameters. 
+ rbracket: RightSquareBracket = RightSquareBracket.field() + + def _codegen_impl(self, state: CodegenState) -> None: + self.lbracket._codegen(state) + params_len = len(self.params) + for idx, param in enumerate(self.params): + param._codegen(state, default_comma=idx + 1 < params_len) + self.rbracket._codegen(state) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeParameters": + return TypeParameters( + lbracket=visit_required(self, "lbracket", self.lbracket, visitor), + params=visit_sequence(self, "params", self.params, visitor), + rbracket=visit_required(self, "rbracket", self.rbracket, visitor), + ) + + +@add_slots +@dataclass(frozen=True) +class TypeAlias(BaseSmallStatement): + """ + A type alias statement. + + This node represents the ``type`` statement as specified initially by PEP-695. + Example: ``type ListOrSet[T] = list[T] | set[T]``. + """ + + #: The name being introduced in this statement. + name: Name + + #: Everything on the right hand side of the ``=``. + value: BaseExpression + + #: An optional list of type parameters, specified after the name. + type_parameters: Optional[TypeParameters] = None + + #: Whitespace between the ``type`` soft keyword and the name. + whitespace_after_type: SimpleWhitespace = SimpleWhitespace.field(" ") + + #: Whitespace between the name and the type parameters (if they exist) or the ``=``. + #: If not specified, :class:`MaybeSentinel` will be replaced with a single space if + #: there are no type parameters, otherwise no spaces. + whitespace_after_name: Union[ + SimpleWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT + + #: Whitespace between the type parameters and the ``=``. Always empty if there are + #: no type parameters. If not specified, :class:`MaybeSentinel` will be replaced + #: with a single space if there are type parameters. + whitespace_after_type_parameters: Union[ + SimpleWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT + + #: Whitespace between the ``=`` and the value. + whitespace_after_equals: SimpleWhitespace = SimpleWhitespace.field(" ") + + #: Optional semicolon when this is used in a statement line. This semicolon + #: owns the whitespace on both sides of it when it is used. 
+ semicolon: Union[Semicolon, MaybeSentinel] = MaybeSentinel.DEFAULT + + def _validate(self) -> None: + if ( + self.type_parameters is None + and self.whitespace_after_type_parameters + not in { + SimpleWhitespace(""), + MaybeSentinel.DEFAULT, + } + ): + raise CSTValidationError( + "whitespace_after_type_parameters must be empty when there are no type parameters in a TypeAlias" + ) + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeAlias": + return TypeAlias( + whitespace_after_type=visit_required( + self, "whitespace_after_type", self.whitespace_after_type, visitor + ), + name=visit_required(self, "name", self.name, visitor), + whitespace_after_name=visit_sentinel( + self, "whitespace_after_name", self.whitespace_after_name, visitor + ), + type_parameters=visit_optional( + self, "type_parameters", self.type_parameters, visitor + ), + whitespace_after_type_parameters=visit_sentinel( + self, + "whitespace_after_type_parameters", + self.whitespace_after_type_parameters, + visitor, + ), + whitespace_after_equals=visit_required( + self, "whitespace_after_equals", self.whitespace_after_equals, visitor + ), + value=visit_required(self, "value", self.value, visitor), + semicolon=visit_sentinel(self, "semicolon", self.semicolon, visitor), + ) + + def _codegen_impl( + self, state: CodegenState, default_semicolon: bool = False + ) -> None: + with state.record_syntactic_position(self): + state.add_token("type") + self.whitespace_after_type._codegen(state) + self.name._codegen(state) + ws_after_name = self.whitespace_after_name + if isinstance(ws_after_name, MaybeSentinel): + if self.type_parameters is None: + state.add_token(" ") + else: + ws_after_name._codegen(state) + + ws_after_type_params = self.whitespace_after_type_parameters + if self.type_parameters is not None: + self.type_parameters._codegen(state) + if isinstance(ws_after_type_params, MaybeSentinel): + state.add_token(" ") + else: + ws_after_type_params._codegen(state) + + state.add_token("=") + self.whitespace_after_equals._codegen(state) + self.value._codegen(state) + + semi = self.semicolon + if isinstance(semi, MaybeSentinel): + if default_semicolon: + state.add_token("; ") + else: + semi._codegen(state) diff --git a/libcst/_nodes/tests/test_classdef.py b/libcst/_nodes/tests/test_classdef.py index db582dce..cca36fbb 100644 --- a/libcst/_nodes/tests/test_classdef.py +++ b/libcst/_nodes/tests/test_classdef.py @@ -8,6 +8,7 @@ from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -112,6 +113,107 @@ class ClassDefCreationTest(CSTNodeTest): def test_valid(self, **kwargs: Any) -> None: self.validate_node(**kwargs) + @data_provider( + ( + { + "node": cst.ClassDef( + cst.Name("Foo"), + cst.SimpleStatementSuite((cst.Pass(),)), + type_parameters=cst.TypeParameters( + ( + cst.TypeParam( + cst.TypeVar( + cst.Name("T"), + bound=cst.Name("int"), + colon=cst.Colon( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), + ), + cst.TypeParam( + cst.TypeVarTuple(cst.Name("Ts")), + cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), + ), + cst.TypeParam(cst.ParamSpec(cst.Name("KW"))), + ) + ), + ), + "code": "class Foo[T: int, *Ts, **KW]: pass\n", + }, + { + "node": cst.ClassDef( + cst.Name("Foo"), + cst.SimpleStatementSuite((cst.Pass(),)), + 
type_parameters=cst.TypeParameters( + params=( + cst.TypeParam( + param=cst.TypeVar( + cst.Name("T"), + bound=cst.Name("str"), + colon=cst.Colon( + whitespace_before=cst.SimpleWhitespace(" "), + whitespace_after=cst.ParenthesizedWhitespace( + empty_lines=(cst.EmptyLine(),), + indent=True, + ), + ), + ), + comma=cst.Comma(cst.SimpleWhitespace(" ")), + ), + cst.TypeParam( + cst.ParamSpec( + cst.Name("PS"), cst.SimpleWhitespace(" ") + ), + cst.Comma(cst.SimpleWhitespace(" ")), + ), + ) + ), + whitespace_after_type_parameters=cst.SimpleWhitespace(" "), + ), + "code": "class Foo[T :\n\nstr ,** PS ,] : pass\n", + }, + { + "node": cst.ClassDef( + cst.Name("Foo"), + cst.SimpleStatementSuite((cst.Pass(),)), + type_parameters=cst.TypeParameters( + params=( + cst.TypeParam( + param=cst.TypeVar( + cst.Name("T"), + bound=cst.Name("str"), + colon=cst.Colon( + whitespace_before=cst.SimpleWhitespace(" "), + whitespace_after=cst.ParenthesizedWhitespace( + empty_lines=(cst.EmptyLine(),), + indent=True, + ), + ), + ), + comma=cst.Comma(cst.SimpleWhitespace(" ")), + ), + cst.TypeParam( + cst.ParamSpec( + cst.Name("PS"), cst.SimpleWhitespace(" ") + ), + cst.Comma(cst.SimpleWhitespace(" ")), + ), + ) + ), + lpar=cst.LeftParen(), + rpar=cst.RightParen(), + whitespace_after_type_parameters=cst.SimpleWhitespace(" "), + ), + "code": "class Foo[T :\n\nstr ,** PS ,] (): pass\n", + }, + ) + ) + def test_valid_native(self, **kwargs: Any) -> None: + if not is_native(): + self.skipTest("Disabled for pure python parser") + self.validate_node(**kwargs) + @data_provider( ( # Basic parenthesis tests. diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 250030c4..087dde19 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -811,11 +811,88 @@ class FunctionDefCreationTest(CSTNodeTest): "parser": parse_statement, "code": "def foo(*a: *tuple[int,*Ts,...]): pass\n", }, + # Single type variable + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + type_parameters=cst.TypeParameters( + (cst.TypeParam(cst.TypeVar(cst.Name("T"))),) + ), + ), + "code": "def foo[T](): pass\n", + "parser": parse_statement, + }, + # All the type parameters + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + type_parameters=cst.TypeParameters( + ( + cst.TypeParam( + cst.TypeVar( + cst.Name("T"), + bound=cst.Name("int"), + colon=cst.Colon( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), + ), + cst.TypeParam( + cst.TypeVarTuple(cst.Name("Ts")), + cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), + ), + cst.TypeParam(cst.ParamSpec(cst.Name("KW"))), + ) + ), + ), + "code": "def foo[T: int, *Ts, **KW](): pass\n", + "parser": parse_statement, + }, + # Type parameters with whitespace + { + "node": cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + type_parameters=cst.TypeParameters( + params=( + cst.TypeParam( + param=cst.TypeVar( + cst.Name("T"), + bound=cst.Name("str"), + colon=cst.Colon( + whitespace_before=cst.SimpleWhitespace(" "), + whitespace_after=cst.ParenthesizedWhitespace( + empty_lines=(cst.EmptyLine(),), + indent=True, + ), + ), + ), + comma=cst.Comma(cst.SimpleWhitespace(" ")), + ), + cst.TypeParam( + cst.ParamSpec( + cst.Name("PS"), cst.SimpleWhitespace(" ") + ), + cst.Comma(cst.SimpleWhitespace(" ")), + ), + ) + ), + 
whitespace_after_type_parameters=cst.SimpleWhitespace(" "), + ), + "code": "def foo[T :\n\nstr ,** PS ,] (): pass\n", + "parser": parse_statement, + }, ) ) def test_valid_native(self, **kwargs: Any) -> None: if not is_native(): - self.skipTest("Disabled for native parser") + self.skipTest("Disabled for pure python parser") self.validate_node(**kwargs) @data_provider( diff --git a/libcst/_nodes/tests/test_type_alias.py b/libcst/_nodes/tests/test_type_alias.py new file mode 100644 index 00000000..11fd1123 --- /dev/null +++ b/libcst/_nodes/tests/test_type_alias.py @@ -0,0 +1,133 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Any + +import libcst as cst +from libcst import parse_statement +from libcst._nodes.tests.base import CSTNodeTest +from libcst._parser.entrypoints import is_native +from libcst.metadata import CodeRange +from libcst.testing.utils import data_provider + + +class TypeAliasCreationTest(CSTNodeTest): + @data_provider( + ( + { + "node": cst.TypeAlias( + cst.Name("foo"), + cst.Name("bar"), + ), + "code": "type foo = bar", + "expected_position": CodeRange((1, 0), (1, 14)), + }, + { + "node": cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [cst.TypeParam(cst.TypeVar(cst.Name("T")))] + ), + value=cst.BinaryOperation( + cst.Name("bar"), cst.BitOr(), cst.Name("baz") + ), + ), + "code": "type foo[T] = bar | baz", + "expected_position": CodeRange((1, 0), (1, 23)), + }, + { + "node": cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.TypeVar(cst.Name("T"), bound=cst.Name("str")) + ), + cst.TypeParam(cst.TypeVarTuple(cst.Name("Ts"))), + cst.TypeParam(cst.ParamSpec(cst.Name("KW"))), + ] + ), + value=cst.BinaryOperation( + cst.Name("bar"), cst.BitOr(), cst.Name("baz") + ), + ), + "code": "type foo[T: str, *Ts, **KW] = bar | baz", + "expected_position": CodeRange((1, 0), (1, 39)), + }, + ) + ) + def test_valid(self, **kwargs: Any) -> None: + if not is_native(): + self.skipTest("Disabled in the old parser") + self.validate_node(**kwargs) + + +class TypeAliasParserTest(CSTNodeTest): + @data_provider( + ( + { + "node": cst.SimpleStatementLine( + [ + cst.TypeAlias( + cst.Name("foo"), + cst.Name("bar"), + whitespace_after_name=cst.SimpleWhitespace(" "), + ) + ] + ), + "code": "type foo = bar\n", + "parser": parse_statement, + }, + { + "node": cst.SimpleStatementLine( + [ + cst.TypeAlias( + cst.Name("foo"), + cst.Name("bar"), + type_parameters=cst.TypeParameters( + params=[ + cst.TypeParam( + cst.TypeVar( + cst.Name("T"), cst.Name("str"), cst.Colon() + ), + cst.Comma(), + ), + cst.TypeParam( + cst.ParamSpec( + cst.Name("KW"), + whitespace_after_star=cst.SimpleWhitespace( + " " + ), + ), + cst.Comma( + whitespace_before=cst.SimpleWhitespace(" "), + whitespace_after=cst.SimpleWhitespace(" "), + ), + ), + ], + rbracket=cst.RightSquareBracket( + cst.SimpleWhitespace("") + ), + ), + whitespace_after_name=cst.SimpleWhitespace(" "), + whitespace_after_type=cst.SimpleWhitespace(" "), + whitespace_after_equals=cst.SimpleWhitespace(" "), + whitespace_after_type_parameters=cst.SimpleWhitespace(" "), + semicolon=cst.Semicolon( + whitespace_before=cst.SimpleWhitespace(" "), + whitespace_after=cst.SimpleWhitespace(" "), + ), + ) + ] + ), + "code": "type foo [T:str,** KW , ] = bar ; \n", + "parser": parse_statement, + }, + ) + ) + def test_valid(self, **kwargs: Any) -> None: + if 
not is_native(): + self.skipTest("Disabled in the old parser") + self.validate_node(**kwargs) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index a28f3fd1..7e1d1c42 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -178,6 +178,7 @@ if TYPE_CHECKING: MatchValue, NameItem, Nonlocal, + ParamSpec, Pass, Raise, Return, @@ -185,6 +186,11 @@ if TYPE_CHECKING: SimpleStatementSuite, Try, TryStar, + TypeAlias, + TypeParam, + TypeParameters, + TypeVar, + TypeVarTuple, While, With, WithItem, @@ -1053,6 +1059,22 @@ class CSTTypedBaseFunctions: def leave_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: pass + @mark_no_op + def visit_ClassDef_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: + pass + @mark_no_op def visit_Colon(self, node: "Colon") -> Optional[bool]: pass @@ -2339,6 +2361,26 @@ class CSTTypedBaseFunctions: def leave_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: pass + @mark_no_op + def visit_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_after_type_parameters( + self, node: "FunctionDef" + ) -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_after_type_parameters( + self, node: "FunctionDef" + ) -> None: + pass + @mark_no_op def visit_GeneratorExp(self, node: "GeneratorExp") -> Optional[bool]: pass @@ -4315,6 +4357,26 @@ class CSTTypedBaseFunctions: def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: pass + @mark_no_op + def visit_ParamSpec(self, node: "ParamSpec") -> Optional[bool]: + pass + + @mark_no_op + def visit_ParamSpec_name(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def leave_ParamSpec_name(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def visit_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def leave_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: + pass + @mark_no_op def visit_ParamStar(self, node: "ParamStar") -> Optional[bool]: pass @@ -5279,6 +5341,174 @@ class CSTTypedBaseFunctions: def leave_Tuple_rpar(self, node: "Tuple") -> None: pass + @mark_no_op + def visit_TypeAlias(self, node: "TypeAlias") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeAlias_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_value(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_value(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def 
leave_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_type_parameters( + self, node: "TypeAlias" + ) -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_type_parameters( + self, node: "TypeAlias" + ) -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_semicolon(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_semicolon(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeParam(self, node: "TypeParam") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeParam_param(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_param(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParam_comma(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_comma(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParameters(self, node: "TypeParameters") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeParameters_params(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeParameters_params(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def visit_TypeParameters_lbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeParameters_lbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def visit_TypeParameters_rbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeParameters_rbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def visit_TypeVar(self, node: "TypeVar") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeVar_name(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVar_name(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def visit_TypeVar_bound(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVar_bound(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def visit_TypeVar_colon(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVar_colon(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def visit_TypeVarTuple(self, node: "TypeVarTuple") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def leave_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def visit_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def leave_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: + pass + @mark_no_op def visit_UnaryOperation(self, node: "UnaryOperation") -> Optional[bool]: pass @@ -6003,6 +6233,10 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_ParamSlash(self, original_node: "ParamSlash") -> None: pass + @mark_no_op + def leave_ParamSpec(self, original_node: "ParamSpec") -> None: + pass + @mark_no_op def leave_ParamStar(self, original_node: "ParamStar") -> None: pass @@ -6133,6 +6367,26 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_Tuple(self, original_node: "Tuple") -> None: pass + @mark_no_op + def leave_TypeAlias(self, original_node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeParam(self, 
original_node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParameters(self, original_node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeVar(self, original_node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVarTuple(self, original_node: "TypeVarTuple") -> None: + pass + @mark_no_op def leave_UnaryOperation(self, original_node: "UnaryOperation") -> None: pass @@ -6931,6 +7185,12 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> Union["ParamSlash", MaybeSentinel]: return updated_node + @mark_no_op + def leave_ParamSpec( + self, original_node: "ParamSpec", updated_node: "ParamSpec" + ) -> "ParamSpec": + return updated_node + @mark_no_op def leave_ParamStar( self, original_node: "ParamStar", updated_node: "ParamStar" @@ -7131,6 +7391,38 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> "BaseExpression": return updated_node + @mark_no_op + def leave_TypeAlias( + self, original_node: "TypeAlias", updated_node: "TypeAlias" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_TypeParam( + self, original_node: "TypeParam", updated_node: "TypeParam" + ) -> Union["TypeParam", FlattenSentinel["TypeParam"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_TypeParameters( + self, original_node: "TypeParameters", updated_node: "TypeParameters" + ) -> "TypeParameters": + return updated_node + + @mark_no_op + def leave_TypeVar( + self, original_node: "TypeVar", updated_node: "TypeVar" + ) -> "TypeVar": + return updated_node + + @mark_no_op + def leave_TypeVarTuple( + self, original_node: "TypeVarTuple", updated_node: "TypeVarTuple" + ) -> "TypeVarTuple": + return updated_node + @mark_no_op def leave_UnaryOperation( self, original_node: "UnaryOperation", updated_node: "UnaryOperation" diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 8323578c..7e3761b8 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -2317,6 +2317,32 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() + type_parameters: Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + AllOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + ] = DoNotCare() + whitespace_after_type_parameters: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -5698,6 +5724,32 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() + type_parameters: Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + AllOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + ] = DoNotCare() + whitespace_after_type_parameters: Union[ + SimpleWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -11962,6 +12014,25 @@ class ParamSlash(BaseMatcherNode): ] = DoNotCare() +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParamSpec(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + @dataclass(frozen=True, eq=False, unsafe_hash=False) class ParamStar(BaseMatcherNode): comma: Union[ @@ -14946,6 +15017,263 @@ class Tuple( ] = DoNotCare() +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeAlias(BaseSmallStatement, BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + type_parameters: Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + AllOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + ] = DoNotCare() + whitespace_after_type: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_name: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_type_parameters: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equals: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +TypeVarOrTypeVarTupleOrParamSpecMatchType = Union[ + "TypeVar", + "TypeVarTuple", + "ParamSpec", + MetadataMatchType, + MatchIfTrue[Union[cst.TypeVar, cst.TypeVarTuple, cst.ParamSpec]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeParam(BaseMatcherNode): + param: Union[ + TypeVarOrTypeVarTupleOrParamSpecMatchType, + DoNotCareSentinel, + OneOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], + AllOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +TypeParamMatchType = Union["TypeParam", MetadataMatchType, MatchIfTrue[cst.TypeParam]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) 
+class TypeParameters(BaseMatcherNode): + params: Union[ + Sequence[ + Union[ + TypeParamMatchType, + DoNotCareSentinel, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + AtLeastN[ + Union[ + TypeParamMatchType, + DoNotCareSentinel, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + AtMostN[ + Union[ + TypeParamMatchType, + DoNotCareSentinel, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.TypeParam]], + OneOf[ + Union[ + Sequence[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + AtLeastN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + AtMostN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.TypeParam]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + AtLeastN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + AtMostN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.TypeParam]], + ] + ], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeVar(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + bound: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeVarTuple(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + BaseUnaryOpMatchType = Union[ "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] ] @@ -15874,6 +16202,7 @@ __all__ = [ "Or", "Param", "ParamSlash", + "ParamSpec", "ParamStar", "Parameters", "ParenthesizedWhitespace", @@ -15907,7 +16236,12 @@ __all__ = [ "Try", "TryStar", "Tuple", + "TypeAlias", "TypeOf", + "TypeParam", + "TypeParameters", + "TypeVar", + "TypeVarTuple", "UnaryOperation", 
"While", "With", diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 87475d05..9d20a23a 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -174,6 +174,7 @@ from libcst._nodes.statement import ( MatchValue, NameItem, Nonlocal, + ParamSpec, Pass, Raise, Return, @@ -181,6 +182,11 @@ from libcst._nodes.statement import ( SimpleStatementSuite, Try, TryStar, + TypeAlias, + TypeParam, + TypeParameters, + TypeVar, + TypeVarTuple, While, With, WithItem, @@ -323,6 +329,7 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { Or: BaseBooleanOp, Param: Union[Param, MaybeSentinel, RemovalSentinel], ParamSlash: Union[ParamSlash, MaybeSentinel], + ParamSpec: ParamSpec, ParamStar: Union[ParamStar, MaybeSentinel], Parameters: Parameters, ParenthesizedWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], @@ -355,6 +362,11 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { Try: Union[BaseStatement, RemovalSentinel], TryStar: Union[BaseStatement, RemovalSentinel], Tuple: BaseExpression, + TypeAlias: Union[BaseSmallStatement, RemovalSentinel], + TypeParam: Union[TypeParam, RemovalSentinel], + TypeParameters: TypeParameters, + TypeVar: TypeVar, + TypeVarTuple: TypeVarTuple, UnaryOperation: BaseExpression, While: Union[BaseStatement, RemovalSentinel], With: Union[BaseStatement, RemovalSentinel], diff --git a/libcst/tests/test_tool.py b/libcst/tests/test_tool.py index 929d7225..2042ef9b 100644 --- a/libcst/tests/test_tool.py +++ b/libcst/tests/test_tool.py @@ -152,6 +152,10 @@ class PrettyPrintNodesTest(UnitTest): whitespace_before_colon=SimpleWhitespace( value='', ), + type_parameters=None, + whitespace_after_type_parameters=SimpleWhitespace( + value='', + ), ), ], header=[], @@ -243,6 +247,7 @@ class PrettyPrintNodesTest(UnitTest): ), ), asynchronous=None, + type_parameters=None, ), ], encoding='utf-8', @@ -532,6 +537,10 @@ class PrettyPrintNodesTest(UnitTest): whitespace_before_colon=SimpleWhitespace( value='', ), + type_parameters=None, + whitespace_after_type_parameters=SimpleWhitespace( + value='', + ), ), ], header=[], @@ -612,6 +621,7 @@ class PrettyPrintNodesTest(UnitTest): ), ), asynchronous=None, + type_parameters=None, ), ], ) diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index d6d8152d..9fbdb1af 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -112,13 +112,15 @@ pub(crate) mod deflated { DeflatedMatchSingleton as MatchSingleton, DeflatedMatchStar as MatchStar, DeflatedMatchTuple as MatchTuple, DeflatedMatchValue as MatchValue, DeflatedNameItem as NameItem, DeflatedNonlocal as Nonlocal, DeflatedOrElse as OrElse, - DeflatedPass as Pass, DeflatedRaise as Raise, DeflatedReturn as Return, - DeflatedSimpleStatementLine as SimpleStatementLine, + DeflatedParamSpec as ParamSpec, DeflatedPass as Pass, DeflatedRaise as Raise, + DeflatedReturn as Return, DeflatedSimpleStatementLine as SimpleStatementLine, DeflatedSimpleStatementSuite as SimpleStatementSuite, DeflatedSmallStatement as SmallStatement, DeflatedStarrableMatchSequenceElement as StarrableMatchSequenceElement, DeflatedStatement as Statement, DeflatedSuite as Suite, DeflatedTry as Try, - DeflatedTryStar as TryStar, DeflatedWhile as While, DeflatedWith as With, - DeflatedWithItem as WithItem, + DeflatedTryStar as TryStar, DeflatedTypeAlias as TypeAlias, DeflatedTypeParam as TypeParam, + DeflatedTypeParameters as TypeParameters, DeflatedTypeVar as TypeVar, + 
DeflatedTypeVarLike as TypeVarLike, DeflatedTypeVarTuple as TypeVarTuple, + DeflatedWhile as While, DeflatedWith as With, DeflatedWithItem as WithItem, }; } diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 65006ab3..43bb6886 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -304,6 +304,7 @@ pub enum SmallStatement<'a> { Nonlocal(Nonlocal<'a>), AugAssign(AugAssign<'a>), Del(Del<'a>), + TypeAlias(TypeAlias<'a>), } impl<'r, 'a> DeflatedSmallStatement<'r, 'a> { @@ -324,6 +325,7 @@ impl<'r, 'a> DeflatedSmallStatement<'r, 'a> { Self::Nonlocal(l) => Self::Nonlocal(l.with_semicolon(semicolon)), Self::AugAssign(a) => Self::AugAssign(a.with_semicolon(semicolon)), Self::Del(d) => Self::Del(d.with_semicolon(semicolon)), + Self::TypeAlias(t) => Self::TypeAlias(t.with_semicolon(semicolon)), } } } @@ -793,6 +795,7 @@ impl<'a> Codegen<'a> for ImportNames<'a> { #[cst_node] pub struct FunctionDef<'a> { pub name: Name<'a>, + pub type_parameters: Option<TypeParameters<'a>>, pub params: Parameters<'a>, pub body: Suite<'a>, pub decorators: Vec<Decorator<'a>>, @@ -802,6 +805,7 @@ pub struct FunctionDef<'a> { pub lines_after_decorators: Vec<EmptyLine<'a>>, pub whitespace_after_def: SimpleWhitespace<'a>, pub whitespace_after_name: SimpleWhitespace<'a>, + pub whitespace_after_type_parameters: SimpleWhitespace<'a>, pub whitespace_before_params: ParenthesizableWhitespace<'a>, pub whitespace_before_colon: SimpleWhitespace<'a>, @@ -838,6 +842,12 @@ impl<'a> Codegen<'a> for FunctionDef<'a> { self.whitespace_after_def.codegen(state); self.name.codegen(state); self.whitespace_after_name.codegen(state); + + if let Some(tp) = &self.type_parameters { + tp.codegen(state); + self.whitespace_after_type_parameters.codegen(state); + } + state.add_token("("); self.whitespace_before_params.codegen(state); self.params.codegen(state); @@ -893,10 +903,26 @@ impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { parse_simple_whitespace(config, &mut (*self.def_tok).whitespace_after.borrow_mut())?; let name = self.name.inflate(config)?; - let whitespace_after_name = parse_simple_whitespace( - config, - &mut (*self.open_paren_tok).whitespace_before.borrow_mut(), - )?; + + let whitespace_after_name; + let mut type_parameters = Default::default(); + let mut whitespace_after_type_parameters = Default::default(); + + if let Some(tp) = self.type_parameters { + let rbracket_tok = tp.rbracket.tok.clone(); + whitespace_after_name = parse_simple_whitespace( + config, + &mut tp.lbracket.tok.whitespace_before.borrow_mut(), + )?; + type_parameters = Some(tp.inflate(config)?); + whitespace_after_type_parameters = + parse_simple_whitespace(config, &mut rbracket_tok.whitespace_after.borrow_mut())?; + } else { + whitespace_after_name = parse_simple_whitespace( + config, + &mut self.open_paren_tok.whitespace_before.borrow_mut(), + )?; + } let whitespace_before_params = parse_parenthesizable_whitespace( config, @@ -914,6 +940,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { let body = self.body.inflate(config)?; Ok(Self::Inflated { name, + type_parameters, params, body, decorators, @@ -923,6 +950,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { lines_after_decorators, whitespace_after_def, whitespace_after_name, + whitespace_after_type_parameters, whitespace_before_params, whitespace_before_colon, }) @@ -1673,6 +1701,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedWhile<'r, 'a> { #[cst_node] pub struct ClassDef<'a> { pub name: Name<'a>, + pub type_parameters: Option<TypeParameters<'a>>, pub body: 
Suite<'a>, pub bases: Vec<Arg<'a>>, pub keywords: Vec<Arg<'a>>, @@ -1683,6 +1712,7 @@ pub struct ClassDef<'a> { pub lines_after_decorators: Vec<EmptyLine<'a>>, pub whitespace_after_class: SimpleWhitespace<'a>, pub whitespace_after_name: SimpleWhitespace<'a>, + pub whitespace_after_type_parameters: SimpleWhitespace<'a>, pub whitespace_before_colon: SimpleWhitespace<'a>, pub(crate) class_tok: TokenRef<'a>, @@ -1709,6 +1739,11 @@ impl<'a> Codegen<'a> for ClassDef<'a> { self.name.codegen(state); self.whitespace_after_name.codegen(state); + if let Some(tp) = &self.type_parameters { + tp.codegen(state); + self.whitespace_after_type_parameters.codegen(state); + } + let need_parens = !self.bases.is_empty() || !self.keywords.is_empty(); if let Some(lpar) = &self.lpar { @@ -1753,19 +1788,27 @@ impl<'r, 'a> Inflate<'a> for DeflatedClassDef<'r, 'a> { parse_simple_whitespace(config, &mut (*self.class_tok).whitespace_after.borrow_mut())?; let name = self.name.inflate(config)?; - let (whitespace_after_name, lpar, bases, keywords, rpar) = - if let Some(lpar_tok) = self.lpar_tok.as_mut() { - ( - parse_simple_whitespace(config, &mut lpar_tok.whitespace_before.borrow_mut())?, - self.lpar.map(|lpar| lpar.inflate(config)).transpose()?, - self.bases.inflate(config)?, - self.keywords.inflate(config)?, - self.rpar.map(|rpar| rpar.inflate(config)).transpose()?, - // TODO: set whitespace_after_arg for last arg? - ) - } else { - Default::default() - }; + let (mut whitespace_after_name, mut type_parameters, mut whitespace_after_type_parameters) = + Default::default(); + + if let Some(tparams) = self.type_parameters { + let rbracket_tok = tparams.rbracket.tok.clone(); + whitespace_after_name = parse_simple_whitespace( + config, + &mut tparams.lbracket.tok.whitespace_before.borrow_mut(), + )?; + type_parameters = Some(tparams.inflate(config)?); + whitespace_after_type_parameters = + parse_simple_whitespace(config, &mut rbracket_tok.whitespace_after.borrow_mut())?; + } else if let Some(lpar_tok) = self.lpar_tok.as_mut() { + whitespace_after_name = + parse_simple_whitespace(config, &mut lpar_tok.whitespace_before.borrow_mut())?; + } + + let lpar = self.lpar.inflate(config)?; + let bases = self.bases.inflate(config)?; + let keywords = self.keywords.inflate(config)?; + let rpar = self.rpar.inflate(config)?; let whitespace_before_colon = parse_simple_whitespace( config, @@ -1775,6 +1818,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedClassDef<'r, 'a> { Ok(Self::Inflated { name, + type_parameters, body, bases, keywords, @@ -1784,6 +1828,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedClassDef<'r, 'a> { leading_lines, lines_after_decorators, whitespace_after_class, + whitespace_after_type_parameters, whitespace_after_name, whitespace_before_colon, }) @@ -3332,3 +3377,245 @@ impl<'r, 'a> Inflate<'a> for DeflatedMatchOr<'r, 'a> { }) } } + +#[cst_node] +pub struct TypeVar<'a> { + pub name: Name<'a>, + pub bound: Option<Box<Expression<'a>>>, + pub colon: Option<Colon<'a>>, +} + +impl<'a> Codegen<'a> for TypeVar<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.name.codegen(state); + self.colon.codegen(state); + if let Some(bound) = &self.bound { + bound.codegen(state); + } + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedTypeVar<'r, 'a> { + type Inflated = TypeVar<'a>; + fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { + let name = self.name.inflate(config)?; + let colon = self.colon.inflate(config)?; + let bound = self.bound.inflate(config)?; + Ok(Self::Inflated { name, bound, colon }) + } +} + +#[cst_node] +pub struct TypeVarTuple<'a> { + pub name: Name<'a>, + + pub 
whitespace_after_star: SimpleWhitespace<'a>, + + pub(crate) star_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for TypeVarTuple<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("*"); + self.whitespace_after_star.codegen(state); + self.name.codegen(state); + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedTypeVarTuple<'r, 'a> { + type Inflated = TypeVarTuple<'a>; + fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { + let whitespace_after_star = + parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; + let name = self.name.inflate(config)?; + Ok(Self::Inflated { + name, + whitespace_after_star, + }) + } +} + +#[cst_node] +pub struct ParamSpec<'a> { + pub name: Name<'a>, + + pub whitespace_after_star: SimpleWhitespace<'a>, + + pub(crate) star_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for ParamSpec<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("**"); + self.whitespace_after_star.codegen(state); + self.name.codegen(state); + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedParamSpec<'r, 'a> { + type Inflated = ParamSpec<'a>; + fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { + let whitespace_after_star = + parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; + let name = self.name.inflate(config)?; + Ok(Self::Inflated { + name, + whitespace_after_star, + }) + } +} + +#[cst_node(Inflate, Codegen)] +pub enum TypeVarLike<'a> { + TypeVar(TypeVar<'a>), + TypeVarTuple(TypeVarTuple<'a>), + ParamSpec(ParamSpec<'a>), +} + +#[cst_node] +pub struct TypeParam<'a> { + pub param: TypeVarLike<'a>, + pub comma: Option<Comma<'a>>, +} + +impl<'a> Codegen<'a> for TypeParam<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.param.codegen(state); + self.comma.codegen(state); + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedTypeParam<'r, 'a> { + type Inflated = TypeParam<'a>; + fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { + let param = self.param.inflate(config)?; + let comma = self.comma.inflate(config)?; + Ok(Self::Inflated { param, comma }) + } +} + +impl<'r, 'a> WithComma<'r, 'a> for DeflatedTypeParam<'r, 'a> { + fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { + Self { + comma: Some(comma), + ..self + } + } +} + +#[cst_node] +pub struct TypeParameters<'a> { + pub params: Vec<TypeParam<'a>>, + + pub lbracket: LeftSquareBracket<'a>, + pub rbracket: RightSquareBracket<'a>, +} + +impl<'a> Codegen<'a> for TypeParameters<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.lbracket.codegen(state); + let params_len = self.params.len(); + for (idx, param) in self.params.iter().enumerate() { + param.codegen(state); + if idx + 1 < params_len && param.comma.is_none() { + state.add_token(", "); + } + } + self.rbracket.codegen(state); + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedTypeParameters<'r, 'a> { + type Inflated = TypeParameters<'a>; + fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { + let lbracket = self.lbracket.inflate(config)?; + let params = self.params.inflate(config)?; + let rbracket = self.rbracket.inflate(config)?; + Ok(Self::Inflated { + params, + lbracket, + rbracket, + }) + } +} + +#[cst_node] +pub struct TypeAlias<'a> { + pub name: Name<'a>, + pub value: Box<Expression<'a>>, + pub type_parameters: Option<TypeParameters<'a>>, + + pub whitespace_after_type: SimpleWhitespace<'a>, + pub whitespace_after_name: Option<SimpleWhitespace<'a>>, + pub whitespace_after_type_parameters: Option<SimpleWhitespace<'a>>, + pub whitespace_after_equals: SimpleWhitespace<'a>, + pub semicolon: Option<Semicolon<'a>>, + + pub(crate) type_tok: TokenRef<'a>, + pub(crate) lbracket_tok: 
Option<TokenRef<'a>>, + pub(crate) equals_tok: TokenRef<'a>, +} + +impl<'a> Codegen<'a> for TypeAlias<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("type"); + self.whitespace_after_type.codegen(state); + self.name.codegen(state); + if self.whitespace_after_name.is_none() && self.type_parameters.is_none() { + state.add_token(" "); + } else { + self.whitespace_after_name.codegen(state); + } + if self.type_parameters.is_some() { + self.type_parameters.codegen(state); + self.whitespace_after_type_parameters.codegen(state); + } + state.add_token("="); + self.whitespace_after_equals.codegen(state); + self.value.codegen(state); + self.semicolon.codegen(state); + } +} + +impl<'r, 'a> Inflate<'a> for DeflatedTypeAlias<'r, 'a> { + type Inflated = TypeAlias<'a>; + fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { + let whitespace_after_type = + parse_simple_whitespace(config, &mut self.type_tok.whitespace_after.borrow_mut())?; + let name = self.name.inflate(config)?; + let whitespace_after_name = Some(if let Some(tok) = self.lbracket_tok { + parse_simple_whitespace(config, &mut tok.whitespace_before.borrow_mut()) + } else { + parse_simple_whitespace(config, &mut self.equals_tok.whitespace_before.borrow_mut()) + }?); + let type_parameters = self.type_parameters.inflate(config)?; + let whitespace_after_type_parameters = if type_parameters.is_some() { + Some(parse_simple_whitespace( + config, + &mut self.equals_tok.whitespace_before.borrow_mut(), + )?) + } else { + None + }; + let whitespace_after_equals = + parse_simple_whitespace(config, &mut self.equals_tok.whitespace_after.borrow_mut())?; + let value = self.value.inflate(config)?; + let semicolon = self.semicolon.inflate(config)?; + Ok(Self::Inflated { + name, + value, + type_parameters, + whitespace_after_type, + whitespace_after_name, + whitespace_after_type_parameters, + whitespace_after_equals, + semicolon, + }) + } +} + +impl<'r, 'a> DeflatedTypeAlias<'r, 'a> { + pub fn with_semicolon(self, semicolon: Option<DeflatedSemicolon<'r, 'a>>) -> Self { + Self { semicolon, ..self } + } +} diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 5e23357d..71ea86e7 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -131,6 +131,7 @@ parser! { #[cache] rule simple_stmt() -> SmallStatement<'input, 'a> = assignment() + / &lit("type") s: type_stmt() {SmallStatement::TypeAlias(s)} / e:star_expressions() { SmallStatement::Expr(Expr { value: e, semicolon: None }) } / &lit("return") s:return_stmt() { SmallStatement::Return(s) } // this is expanded from the original grammar's import_stmt rule @@ -320,9 +321,9 @@ parser! { / class_def_raw() rule class_def_raw() -> ClassDef<'input, 'a> - = kw:lit("class") n:name() arg:(l:lpar() a:arguments()? r:rpar() {(l, a, r)})? + = kw:lit("class") n:name() t:type_params()? arg:(l:lpar() a:arguments()? r:rpar() {(l, a, r)})? col:lit(":") b:block() {? - make_class_def(kw, n, arg, col, b) + make_class_def(kw, n, t, arg, col, b) } // Function definitions @@ -337,13 +338,13 @@ parser! { } rule function_def_raw() -> FunctionDef<'input, 'a> - = def:lit("def") n:name() op:lit("(") params:params()? + = def:lit("def") n:name() t:type_params()? op:lit("(") params:params()? cp:lit(")") ty:_returns()? c:lit(":") b:block() { - make_function_def(None, def, n, op, params, cp, ty, c, b) + make_function_def(None, def, n, t, op, params, cp, ty, c, b) } - / asy:tok(Async, "ASYNC") def:lit("def") n:name() op:lit("(") params:params()? 
+ / asy:tok(Async, "ASYNC") def:lit("def") n:name() t:type_params()? op:lit("(") params:params()? cp:lit(")") ty:_returns()? c:lit(":") b:block() { - make_function_def(Some(asy), def, n, op, params, cp, ty, c, b) + make_function_def(Some(asy), def, n, t, op, params, cp, ty, c, b) } // Function parameters @@ -776,6 +777,27 @@ parser! { make_match_keyword_element(arg, eq, value) } + // Type statement + + rule type_stmt() -> TypeAlias<'input, 'a> + = t:lit("type") n:name() ps:type_params()? eq:lit("=") v:expression() { + make_type_alias(t, n, ps, eq, v) + } + + // Type parameter declaration + + rule type_params() -> TypeParameters<'input, 'a> + = lb:lbrak() ps:separated_trailer(<type_param()>, <comma()>) rb:rbrak() { + make_type_parameters(lb, comma_separate(ps.0, ps.1, ps.2), rb) + } + + rule type_param() -> TypeParam<'input, 'a> + = n:name() b:type_param_bound()? { make_type_var(n, b) } + / s:lit("*") n:name() { make_type_var_tuple(s, n) } + / s:lit("**") n:name() { make_param_spec(s, n) } + + rule type_param_bound() -> TypeParamBound<'input, 'a> + = c:lit(":") e:expression() { make_type_param_bound(c, e) } // Expressions #[cache] @@ -1511,6 +1533,7 @@ fn make_function_def<'input, 'a>( async_tok: Option<TokenRef<'input, 'a>>, def_tok: TokenRef<'input, 'a>, name: Name<'input, 'a>, + type_parameters: Option<TypeParameters<'input, 'a>>, open_paren_tok: TokenRef<'input, 'a>, params: Option<Parameters<'input, 'a>>, close_paren_tok: TokenRef<'input, 'a>, @@ -1521,6 +1544,7 @@ fn make_function_def<'input, 'a>( let asynchronous = async_tok.as_ref().map(|_| make_async()); FunctionDef { name, + type_parameters, params: params.unwrap_or_default(), body, decorators: Default::default(), @@ -2761,6 +2785,7 @@ fn make_await<'input, 'a>( fn make_class_def<'input, 'a>( class_tok: TokenRef<'input, 'a>, name: Name<'input, 'a>, + type_parameters: Option<TypeParameters<'input, 'a>>, args: Option<( LeftParen<'input, 'a>, Option<Vec<Arg<'input, 'a>>>, @@ -2801,6 +2826,7 @@ fn make_class_def<'input, 'a>( } Ok(ClassDef { name, + type_parameters, body, bases, keywords, @@ -3339,3 +3365,81 @@ fn make_match_keyword_element<'input, 'a>( equal_tok, } } + +struct TypeParamBound<'input, 'a>(TokenRef<'input, 'a>, Expression<'input, 'a>); + +fn make_type_param_bound<'input, 'a>( + colon_tok: TokenRef<'input, 'a>, + e: Expression<'input, 'a>, +) -> TypeParamBound<'input, 'a> { + TypeParamBound(colon_tok, e) +} + +fn make_param_spec<'input, 'a>( + star_tok: TokenRef<'input, 'a>, + name: Name<'input, 'a>, +) -> TypeParam<'input, 'a> { + TypeParam { + param: TypeVarLike::ParamSpec(ParamSpec { name, star_tok }), + comma: Default::default(), + } +} + +fn make_type_var_tuple<'input, 'a>( + star_tok: TokenRef<'input, 'a>, + name: Name<'input, 'a>, +) -> TypeParam<'input, 'a> { + TypeParam { + param: TypeVarLike::TypeVarTuple(TypeVarTuple { name, star_tok }), + comma: Default::default(), + } +} + +fn make_type_var<'input, 'a>( + name: Name<'input, 'a>, + bound: Option<TypeParamBound<'input, 'a>>, +) -> TypeParam<'input, 'a> { + let (bound, colon) = match bound { + Some(TypeParamBound(c, e)) => (Some(Box::new(e)), Some(make_colon(c))), + _ => (None, None), + }; + TypeParam { + param: TypeVarLike::TypeVar(TypeVar { name, bound, colon }), + comma: Default::default(), + } +} + +fn make_type_parameters<'input, 'a>( + lbracket: LeftSquareBracket<'input, 'a>, + params: Vec<TypeParam<'input, 'a>>, + rbracket: RightSquareBracket<'input, 'a>, +) -> TypeParameters<'input, 'a> { + TypeParameters { + lbracket, + params, + rbracket, + } +} + +fn make_type_alias<'input, 'a>( + type_tok: TokenRef<'input, 'a>, + name: Name<'input, 'a>, + type_parameters: Option<TypeParameters<'input, 'a>>, + equals_tok: TokenRef<'input, 'a>, + value: Expression<'input, 'a>, +) 
-> TypeAlias<'input, 'a> { + let lbracket_tok = if let Some(tp) = &type_parameters { + Some(tp.lbracket.tok) + } else { + None + }; + TypeAlias { + type_tok, + name, + type_parameters, + equals_tok, + value: Box::new(value), + semicolon: Default::default(), + lbracket_tok, + } +} diff --git a/native/libcst/tests/fixtures/type_parameters.py b/native/libcst/tests/fixtures/type_parameters.py new file mode 100644 index 00000000..e5329f01 --- /dev/null +++ b/native/libcst/tests/fixtures/type_parameters.py @@ -0,0 +1,57 @@ +# fmt: off + +type TA = int + +type TA1[A] = lambda A: A + +class Outer[A]: + type TA1[A] = None + +type TA1[A, B] = dict[A, B] + +class Outer[A]: + def inner[B](self): + type TA1[C] = TA1[A, B] | int + return TA1 + +def more_generic[T, *Ts, **P](): + type TA[T2, *Ts2, **P2] = tuple[Callable[P, tuple[T, *Ts]], Callable[P2, tuple[T2, *Ts2]]] + return TA + +type Recursive = Recursive + +def func[A](A): return A + +class ClassA: + def func[__A](self, __A): return __A + +class ClassA[A, B](dict[A, B]): + ... + +class ClassA[A]: + def funcB[B](self): + class ClassC[C]: + def funcD[D](self): + return lambda: (A, B, C, D) + return ClassC + +class Child[T](Base[lambda: (int, outer_var, T)]): ... + +type Alias[T: ([T for T in (T, [1])[1]], T)] = [T for T in T.__name__] +type Alias[T: [lambda: T for T in (T, [1])[1]]] = [lambda: T for T in T.__name__] + +class Foo[T: Foo, U: (Foo, Foo)]: + pass + +def func[T](a: T = "a", *, b: T = "b"): + return (a, b) + +def func1[A: str, B: str | int, C: (int, str)](): + return (A, B, C) + +type A [ T , * V ] =foo;type B=A + +def AAAAAAAAAAAAAAAAAA [ T : int ,*Ts , ** TT ] ():pass +class AAAAAAAAAAAAAAAAAA [ T : int ,*Ts , ** TT ] :pass + +def yikes[A:int,*B,**C](*d:*tuple[A,*B,...])->A:pass \ No newline at end of file From 9c263aa8977962a870ce2770d2aa18ee0dacb344 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Sat, 2 Sep 2023 10:56:20 +0200 Subject: [PATCH 402/632] Support files with mixed newlines (#1007) * Add test case with mixed newlines * Split lines by any newline character and not just by default * Add unit test, remove copied --- native/Cargo.lock | 1 + native/libcst/Cargo.toml | 1 + .../libcst/src/tokenizer/whitespace_parser.rs | 64 +++++++++++++++++-- native/libcst/tests/.gitattributes | 1 + .../libcst/tests/fixtures/mixed_newlines.py | 3 + 5 files changed, 64 insertions(+), 6 deletions(-) create mode 100644 native/libcst/tests/.gitattributes create mode 100644 native/libcst/tests/fixtures/mixed_newlines.py diff --git a/native/Cargo.lock b/native/Cargo.lock index 06a718fd..6b03c3f7 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -356,6 +356,7 @@ dependencies = [ "difference", "itertools 0.11.0", "libcst_derive", + "memchr", "paste", "peg", "pyo3", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 6c842a39..8d50e364 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,6 +35,7 @@ thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" +memchr = "2.5.0" libcst_derive = { path = "../libcst_derive" } [dev-dependencies] diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs index be5b7752..d76357b4 100644 --- a/native/libcst/src/tokenizer/whitespace_parser.rs +++ b/native/libcst/src/tokenizer/whitespace_parser.rs @@ -7,6 +7,7 @@ use crate::nodes::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, }; +use memchr::memchr2_iter; use regex::Regex; 
use thiserror::Error; @@ -16,9 +17,8 @@ use super::TokType; thread_local! { static SIMPLE_WHITESPACE_RE: Regex = Regex::new(r"\A([ \f\t]|\\(\r\n?|\n))*").expect("regex"); -static NEWLINE_RE: Regex = Regex::new(r"\A(\r\n?|\n)").expect("regex"); -static COMMENT_RE: Regex = Regex::new(r"\A#[^\r\n]*").expect("regex"); -static NEWLINE_RE_2: Regex = Regex::new(r"\r\n?|\n").expect("regex"); + static NEWLINE_RE: Regex = Regex::new(r"\A(\r\n?|\n)").expect("regex"); + static COMMENT_RE: Regex = Regex::new(r"\A#[^\r\n]*").expect("regex"); } #[allow(clippy::upper_case_acronyms, clippy::enum_variant_names)] @@ -74,12 +74,44 @@ impl<'a> Config<'a> { break; } } - let default_newline = - NEWLINE_RE_2.with(|r| r.find(input).map(|m| m.as_str()).unwrap_or("\n")); + + let mut lines = Vec::new(); + let mut start = 0; + let mut newline_positions = memchr2_iter(b'\n', b'\r', input.as_bytes()); + + while let Some(newline_position) = newline_positions.next() { + let newline_character = input.as_bytes()[newline_position] as char; + + let len = if newline_character == '\r' + && input.as_bytes().get(newline_position + 1) == Some(&b'\n') + { + // Skip the next '\n' + newline_positions.next(); + 2 + } else { + 1 + }; + + let end = newline_position + len; + lines.push(&input[start..end]); + start = end; + } + + // Push the last line if it isn't terminated by a newline character + if start < input.len() { + lines.push(&input[start..]); + } + + let default_newline = match lines.first().map(|line| line.as_bytes()).unwrap_or(&[]) { + [.., b'\r', b'\n'] => "\r\n", + [.., b'\n'] => "\n", + [.., b'\r'] => "\r", + _ => "\n", + }; Self { input, - lines: input.split_inclusive(default_newline).collect(), + lines, default_newline, default_indent, } @@ -401,3 +433,23 @@ pub fn parse_parenthesized_whitespace<'a>( Ok(None) } } + +#[cfg(test)] +mod tests { + use crate::{tokenize, Config, Result}; + + #[test] + fn config_mixed_newlines() -> Result<'static, ()> { + let source = "'' % {\n'test1': '',\r 'test2': '',\r\n}"; + let tokens = tokenize(source)?; + + let config = Config::new(source, &tokens); + + assert_eq!( + &config.lines, + &["'' % {\n", "'test1': '',\r", " 'test2': '',\r\n", "}"] + ); + + Ok(()) + } +} diff --git a/native/libcst/tests/.gitattributes b/native/libcst/tests/.gitattributes new file mode 100644 index 00000000..e50e9b7b --- /dev/null +++ b/native/libcst/tests/.gitattributes @@ -0,0 +1 @@ +fixtures/mixed_newlines.py autocrlf=false \ No newline at end of file diff --git a/native/libcst/tests/fixtures/mixed_newlines.py b/native/libcst/tests/fixtures/mixed_newlines.py new file mode 100644 index 00000000..935a8b45 --- /dev/null +++ b/native/libcst/tests/fixtures/mixed_newlines.py @@ -0,0 +1,3 @@ +"" % { + 'test1': '', 'test2': '', +} From 377a292d0d43a71b1e9704799d7e46b621a97018 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 3 Sep 2023 18:16:10 +0100 Subject: [PATCH 403/632] Add crate metadata --- native/libcst/Cargo.toml | 10 ++++++++-- native/libcst_derive/Cargo.toml | 5 +++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 8d50e364..6ee3bccf 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -9,6 +9,12 @@ version = "0.1.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" +description = "A Python parser and Concrete Syntax Tree library." 
+license-file = "../../LICENSE" +homepage = "https://github.com/Instagram/LibCST" +documentation = "https://libcst.rtfd.org" +keywords = ["python", "cst", "ast"] +categories = ["parser-implementations"] [lib] name = "libcst_native" @@ -25,7 +31,7 @@ path = "src/bin.rs" # Once https://github.com/PyO3/pyo3/pull/1123 lands, it may be better to use # `-Zextra-link-arg` for this instead. default = ["py"] -py = ["pyo3","pyo3/extension-module"] +py = ["pyo3", "pyo3/extension-module"] trace = ["peg/trace"] [dependencies] @@ -36,7 +42,7 @@ peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" memchr = "2.5.0" -libcst_derive = { path = "../libcst_derive" } +libcst_derive = { path = "../libcst_derive", version = "0.1.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 5ce55e09..d3453032 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -2,6 +2,11 @@ name = "libcst_derive" version = "0.1.0" edition = "2018" +description = "Proc macro helpers for libcst." +license-file = "../../LICENSE" +homepage = "https://github.com/Instagram/LibCST" +documentation = "https://libcst.rtfd.org" +keywords = ["macros", "python"] [lib] proc-macro = true From 94dd20e20ee87699d7f2c7815744f56f805723ef Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 9 Sep 2023 17:03:01 +0100 Subject: [PATCH 404/632] parser: remove Regexes from whitespace parser (#1008) removing Regexes from whitespace parser allows ditching of thread local storage + lazy initialization cost This shows a modest 2% improvement in overall parse time (inflate is improved by 10%) --- .../libcst/src/tokenizer/whitespace_parser.rs | 140 +++++++++++++----- 1 file changed, 106 insertions(+), 34 deletions(-) diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs index d76357b4..3f96bab2 100644 --- a/native/libcst/src/tokenizer/whitespace_parser.rs +++ b/native/libcst/src/tokenizer/whitespace_parser.rs @@ -7,20 +7,13 @@ use crate::nodes::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, }; -use memchr::memchr2_iter; -use regex::Regex; +use memchr::{memchr2, memchr2_iter}; use thiserror::Error; use crate::Token; use super::TokType; -thread_local! 
{ - static SIMPLE_WHITESPACE_RE: Regex = Regex::new(r"\A([ \f\t]|\\(\r\n?|\n))*").expect("regex"); - static NEWLINE_RE: Regex = Regex::new(r"\A(\r\n?|\n)").expect("regex"); - static COMMENT_RE: Regex = Regex::new(r"\A#[^\r\n]*").expect("regex"); -} - #[allow(clippy::upper_case_acronyms, clippy::enum_variant_names)] #[derive(Error, Debug, PartialEq, Eq)] pub enum WhitespaceError { @@ -231,29 +224,34 @@ pub fn parse_empty_lines<'a>( pub fn parse_comment<'a>(config: &Config<'a>, state: &mut State) -> Result>> { let newline_after = config.get_line_after_column(state.line, state.column_byte)?; - if let Some(comment_match) = COMMENT_RE.with(|r| r.find(newline_after)) { - let comment_str = comment_match.as_str(); - advance_this_line( - config, - state, - comment_str.chars().count(), - comment_str.len(), - )?; - return Ok(Some(Comment(comment_str))); + if newline_after.as_bytes().first() != Some(&b'#') { + return Ok(None); } - Ok(None) + let comment_str = if let Some(idx) = memchr2(b'\n', b'\r', newline_after.as_bytes()) { + &newline_after[..idx] + } else { + newline_after + }; + advance_this_line( + config, + state, + comment_str.chars().count(), + comment_str.len(), + )?; + Ok(Some(Comment(comment_str))) } pub fn parse_newline<'a>(config: &Config<'a>, state: &mut State) -> Result>> { let newline_after = config.get_line_after_column(state.line, state.column_byte)?; - if let Some(newline_match) = NEWLINE_RE.with(|r| r.find(newline_after)) { - let newline_str = newline_match.as_str(); - advance_this_line( - config, - state, - newline_str.chars().count(), - newline_str.len(), - )?; + let len = match newline_after.as_bytes() { + [b'\n', ..] => 1, + [b'\r', b'\n', ..] => 2, + [b'\r', ..] => 1, + _ => 0, + }; + if len > 0 { + let newline_str = &newline_after[..len]; + advance_this_line(config, state, len, len)?; if state.column_byte != config.get_line(state.line)?.len() { return Err(WhitespaceError::InternalError(format!( "Found newline at ({}, {}) but it's not EOL", @@ -376,13 +374,18 @@ pub fn parse_simple_whitespace<'a>( state: &mut State, ) -> Result> { let capture_ws = |line, col| -> Result<&'a str> { - let x = config.get_line_after_column(line, col); - let x = x?; - Ok(SIMPLE_WHITESPACE_RE.with(|r| { - r.find(x) - .expect("SIMPLE_WHITESPACE_RE supports 0-length matches, so it must always match") - .as_str() - })) + let line = config.get_line_after_column(line, col)?; + let bytes = line.as_bytes(); + let mut idx = 0; + while idx < bytes.len() { + match bytes[idx..] { + [b' ' | b'\t' | b'\x0c', ..] => idx += 1, + [b'\\', b'\r', b'\n', ..] => idx += 3, + [b'\\', b'\r' | b'\n', ..] => idx += 2, + _ => break, + } + } + Ok(&line[..idx]) }; let start_offset = state.byte_offset; let mut prev_line: &str; @@ -436,7 +439,9 @@ pub fn parse_parenthesized_whitespace<'a>( #[cfg(test)] mod tests { - use crate::{tokenize, Config, Result}; + use crate::{tokenize, Comment, Config, Result, SimpleWhitespace}; + + use super::{parse_comment, parse_simple_whitespace}; #[test] fn config_mixed_newlines() -> Result<'static, ()> { @@ -452,4 +457,71 @@ mod tests { Ok(()) } + + fn _parse_simple_whitespace(src: &str) -> Result { + let tokens = tokenize(src)?; + let config = Config::new(src, &tokens); + let mut state = Default::default(); + Ok(parse_simple_whitespace(&config, &mut state)?) 
+ } + + #[test] + fn simple_whitespace_line_continuations() -> Result<'static, ()> { + assert_eq!( + _parse_simple_whitespace(" \\\n # foo")?, + SimpleWhitespace(" \\\n ") + ); + + assert_eq!( + _parse_simple_whitespace(" \\\r # foo")?, + SimpleWhitespace(" \\\r ") + ); + assert_eq!( + _parse_simple_whitespace(" \\\r\n # foo")?, + SimpleWhitespace(" \\\r\n ") + ); + + assert_eq!( + _parse_simple_whitespace(" \\\r\n\\\n # foo")?, + SimpleWhitespace(" \\\r\n\\\n ") + ); + + Ok(()) + } + + #[test] + fn simple_whitespace_mixed() -> Result<'static, ()> { + assert_eq!( + _parse_simple_whitespace(" \t\x0clol")?, + SimpleWhitespace(" \t\x0c"), + ); + + Ok(()) + } + + fn _parse_comment(src: &str) -> Result> { + let tokens = tokenize(src)?; + let config = Config::new(src, &tokens); + let mut state = Default::default(); + Ok(parse_comment(&config, &mut state)?) + } + + #[test] + fn single_comment() -> Result<'static, ()> { + assert_eq!(_parse_comment("# foo\n# bar")?, Some(Comment("# foo"))); + Ok(()) + } + + #[test] + fn comment_until_eof() -> Result<'static, ()> { + assert_eq!(_parse_comment("#")?, Some(Comment("#"))); + Ok(()) + } + + #[test] + fn no_comment() -> Result<'static, ()> { + assert_eq!(_parse_comment("foo")?, None); + assert_eq!(_parse_comment("\n")?, None); + Ok(()) + } } From f469bcc7550e144bb2dfdf42ad7a3a129accff6e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Sep 2023 03:36:31 -0700 Subject: [PATCH 405/632] Bump actions/checkout from 3 to 4 (#1015) Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 4 ++-- .github/workflows/ci.yml | 14 +++++++------- .github/workflows/pypi_upload.yml | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 49c44772..aa9e1a9b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,7 +10,7 @@ jobs: outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - id: set-matrix # match github.ref to the on_ref_regex field in the json # to skip running linux/aarch64 builds on PRs @@ -39,7 +39,7 @@ jobs: CIBW_ARCHS: ${{ matrix.vers }} CIBW_BUILD_VERBOSITY: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v4 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f928d012..e2dfc4e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ jobs: os: [macos-latest, ubuntu-latest, windows-latest] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12-dev"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v4 @@ -48,7 +48,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v4 @@ -65,7 +65,7 @@ jobs: typecheck: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v4 @@ -81,7 +81,7 @@ jobs: coverage: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v4 @@ -110,7 +110,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v4 @@ -137,7 +137,7 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions-rs/toolchain@v1 with: toolchain: stable @@ -178,7 +178,7 @@ jobs: name: Rustfmt runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions-rs/toolchain@v1 with: profile: minimal diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index b399ef5b..7593f3de 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Download binary wheels From b509cc8b08fdbce5df5c91d43d4e2152a5e71c28 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Sep 2023 03:37:50 -0700 Subject: [PATCH 406/632] Bump black from 23.7.0 to 23.9.1 (#1017) Bumps [black](https://github.com/psf/black) from 23.7.0 to 23.9.1. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.7.0...23.9.1) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4718e149..8dde45c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ dependencies = [ [project.optional-dependencies] dev = [ - "black==23.7.0", + "black==23.9.1", "coverage>=4.5.4", "build>=0.10.0", "fixit==2.0.0.post1", From 9d869b6639cdd81a3a6ff0794d41df732103f03a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 16 Sep 2023 03:59:29 -0700 Subject: [PATCH 407/632] scope_provider: Simplify parent resolution (#1013) This PR introduces `Scope._next_visible_parent` which deduplicates much of the logic between `_contains_in_self_or_parent`, `_find_assignment_target_parent`, and `_getitem_from_self_or_parent`. This will be helpful when implementing scope resolution for the future `AnnotationScope`. There should be no functionality change. --- libcst/metadata/scope_provider.py | 80 ++++++++++++++----------------- 1 file changed, 35 insertions(+), 45 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 88de3fc4..4268c5d4 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -436,20 +436,34 @@ class Scope(abc.ABC): def _find_assignment_target(self, name: str) -> "Scope": return self - def _find_assignment_target_parent(self, name: str) -> "Scope": - return self - def record_access(self, name: str, access: Access) -> None: self._accesses_by_name[name].add(access) self._accesses_by_node[access.node].add(access) - def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: - """Overridden by ClassScope to hide it's assignments from child scopes.""" - return self[name] + def _is_visible_from_children(self) -> bool: + """Returns if the assignments in this scope can be accessed from children. - def _contains_in_self_or_parent(self, name: str) -> bool: - """Overridden by ClassScope to hide it's assignments from child scopes.""" - return name in self + This is normally True, except for class scopes:: + + def outer_fn(): + v = ... # outer_fn's declaration + class InnerCls: + v = ... # shadows outer_fn's declaration + class InnerInnerCls: + v = ... # shadows all previous declarations of v + def inner_fn(): + nonlocal v + v = ... # this refers to outer_fn's declaration + # and not to any of the inner classes' as those are + # hidden from their children. 
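
The visibility rule this docstring describes can also be observed from the public `ScopeProvider` API. The snippet below is only an illustrative sketch, not part of the patch itself: the sample module and its variable names are invented, and it relies on the existing `MetadataWrapper`, `ScopeProvider` and `ensure_type` entry points.

```
import libcst as cst
from libcst import ensure_type
from libcst.metadata import MetadataWrapper, ScopeProvider

code = (
    "def outer_fn():\n"
    "    v = 1\n"
    "    class InnerCls:\n"
    "        v = 2\n"
    "        def inner_fn():\n"
    "            return v\n"
)
wrapper = MetadataWrapper(cst.parse_module(code))
scopes = wrapper.resolve(ScopeProvider)

outer_fn = ensure_type(wrapper.module.body[0], cst.FunctionDef)
inner_cls = ensure_type(outer_fn.body.body[1], cst.ClassDef)
inner_fn = ensure_type(inner_cls.body.body[1], cst.FunctionDef)
ret = ensure_type(
    ensure_type(inner_fn.body.body[0], cst.SimpleStatementLine).body[0],
    cst.Return,
)

# Resolving `v` from inside inner_fn skips InnerCls (class scopes are hidden
# from their children), so the only visible assignment is outer_fn's `v = 1`.
assignments = scopes[ret.value]["v"]
print(len(assignments))  # expected: 1
```
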
+ """ + return True + + def _next_visible_parent(self, first: Optional["Scope"] = None) -> "Scope": + parent = first if first is not None else self.parent + while not parent._is_visible_from_children(): + parent = parent.parent + return parent @abc.abstractmethod def __contains__(self, name: str) -> bool: @@ -630,13 +644,14 @@ class GlobalScope(Scope): def __contains__(self, name: str) -> bool: if name in self._assignments: return len(self._assignments[name]) > 0 - return self.parent._contains_in_self_or_parent(name) + return name in self._next_visible_parent() def __getitem__(self, name: str) -> Set[BaseAssignment]: if name in self._assignments: return self._assignments[name] - else: - return self.parent._getitem_from_self_or_parent(name) + + parent = self._next_visible_parent() + return parent[name] def record_global_overwrite(self, name: str) -> None: pass @@ -672,7 +687,8 @@ class LocalScope(Scope, abc.ABC): def _find_assignment_target(self, name: str) -> "Scope": if name in self._scope_overwrites: - return self._scope_overwrites[name]._find_assignment_target_parent(name) + scope = self._scope_overwrites[name] + return self._next_visible_parent(scope)._find_assignment_target(name) else: return super()._find_assignment_target(name) @@ -681,15 +697,16 @@ class LocalScope(Scope, abc.ABC): return name in self._scope_overwrites[name] if name in self._assignments: return len(self._assignments[name]) > 0 - return self.parent._contains_in_self_or_parent(name) + return name in self._next_visible_parent() def __getitem__(self, name: str) -> Set[BaseAssignment]: if name in self._scope_overwrites: - return self._scope_overwrites[name]._getitem_from_self_or_parent(name) + scope = self._scope_overwrites[name] + return self._next_visible_parent(scope)[name] if name in self._assignments: return self._assignments[name] else: - return self.parent._getitem_from_self_or_parent(name) + return self._next_visible_parent()[name] def _make_name_prefix(self) -> str: # filter falsey strings out @@ -711,35 +728,8 @@ class ClassScope(LocalScope): When a class is defined, it creates a ClassScope. """ - def _find_assignment_target_parent(self, name: str) -> "Scope": - """ - Forward the assignment to parent. - - def outer_fn(): - v = ... # outer_fn's declaration - class InnerCls: - v = ... # shadows outer_fn's declaration - def inner_fn(): - nonlocal v - v = ... # this should actually refer to outer_fn's declaration - # and not to InnerCls's, because InnerCls's scope is - # hidden from its children. - - """ - return self.parent._find_assignment_target_parent(name) - - def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: - """ - Class variables are only accessible using ClassName.attribute, cls.attribute, or - self.attribute in child scopes. They cannot be accessed with their bare names. 
- """ - return self.parent._getitem_from_self_or_parent(name) - - def _contains_in_self_or_parent(self, name: str) -> bool: - """ - See :meth:`_getitem_from_self_or_parent` - """ - return self.parent._contains_in_self_or_parent(name) + def _is_visible_from_children(self) -> bool: + return False def _make_name_prefix(self) -> str: # filter falsey strings out From 37277e5fe71781148b897d4cb4a7125e61b580f3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 16 Sep 2023 04:02:14 -0700 Subject: [PATCH 408/632] add upper bound to pyo3 dependency --- native/libcst/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 6ee3bccf..b52b9015 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.9" -pyo3 = { version = ">=0.17", optional = true } +pyo3 = { version = ">=0.17,<0.20", optional = true } thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" From a27c4c745c09bd602dad512371d19a92f2c20264 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 09:16:01 +0100 Subject: [PATCH 409/632] Bump pypa/cibuildwheel from 2.15.0 to 2.16.0 (#1025) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.15.0 to 2.16.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.15.0...v2.16.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index aa9e1a9b..23653d56 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.15.0 + uses: pypa/cibuildwheel@v2.16.0 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From 5346bbfbddd535fdbd6046316f74a20d1f7b8f31 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 30 Sep 2023 10:20:56 +0100 Subject: [PATCH 410/632] Bump Swatinem/rust-cache from 2.6.2 to 2.7.0 (#1020) Bumps [Swatinem/rust-cache](https://github.com/swatinem/rust-cache) from 2.6.2 to 2.7.0. - [Release notes](https://github.com/swatinem/rust-cache/releases) - [Changelog](https://github.com/Swatinem/rust-cache/blob/master/CHANGELOG.md) - [Commits](https://github.com/swatinem/rust-cache/compare/v2.6.2...v2.7.0) --- updated-dependencies: - dependency-name: Swatinem/rust-cache dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 23653d56..4b466a03 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -50,7 +50,7 @@ jobs: python-version: "3.10" - name: Rust Cache if: ${{ !contains(matrix.os, 'self-hosted') }} - uses: Swatinem/rust-cache@v2.6.2 + uses: Swatinem/rust-cache@v2.7.0 with: working-directory: native - name: Disable scmtools local scheme diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e2dfc4e1..f81b3bf4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: with: toolchain: stable - name: Rust Cache - uses: Swatinem/rust-cache@v2.6.2 + uses: Swatinem/rust-cache@v2.7.0 with: shared-key: "shared" workspaces: native @@ -143,7 +143,7 @@ jobs: toolchain: stable components: rustfmt, clippy - name: Rust Cache - uses: Swatinem/rust-cache@v2.6.2 + uses: Swatinem/rust-cache@v2.7.0 with: shared-key: "shared" workspaces: native From 46060119a4a89e8d16604a8c36759022b3a4257e Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 30 Sep 2023 11:16:27 +0100 Subject: [PATCH 411/632] Scope provider changes for type annotations (#1014) --- libcst/helpers/common.py | 6 +- libcst/metadata/scope_provider.py | 144 +++++++++--- libcst/metadata/tests/test_scope_provider.py | 227 +++++++++++++++++++ 3 files changed, 340 insertions(+), 37 deletions(-) diff --git a/libcst/helpers/common.py b/libcst/helpers/common.py index 0965abeb..16c77669 100644 --- a/libcst/helpers/common.py +++ b/libcst/helpers/common.py @@ -3,12 +3,12 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -from typing import Type +from typing import Type, TypeVar -from libcst._types import CSTNodeT +T = TypeVar("T") -def ensure_type(node: object, nodetype: Type[CSTNodeT]) -> CSTNodeT: +def ensure_type(node: object, nodetype: Type[T]) -> T: """ Takes any python object, and a LibCST :class:`~libcst.CSTNode` subclass and refines the type of the python object. 
This is most useful when you already diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 4268c5d4..73bb61f5 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -7,7 +7,7 @@ import abc import builtins from collections import defaultdict -from contextlib import contextmanager +from contextlib import contextmanager, ExitStack from dataclasses import dataclass from enum import auto, Enum from typing import ( @@ -51,6 +51,10 @@ _ASSIGNMENT_LIKE_NODES = ( cst.Nonlocal, cst.Parameters, cst.WithItem, + cst.TypeVar, + cst.TypeAlias, + cst.TypeVarTuple, + cst.ParamSpec, ) @@ -116,7 +120,7 @@ class Access: self.__assignments.add(assignment) def record_assignments(self, name: str) -> None: - assignments = self.scope[name] + assignments = self.scope._resolve_scope_for_access(name, self.scope) # filter out assignments that happened later than this access previous_assignments = { assignment @@ -124,7 +128,9 @@ class Access: if assignment.scope != self.scope or assignment._index < self.__index } if not previous_assignments and assignments and self.scope.parent != self.scope: - previous_assignments = self.scope.parent[name] + previous_assignments = self.scope.parent._resolve_scope_for_access( + name, self.scope + ) self.__assignments |= previous_assignments @@ -440,7 +446,7 @@ class Scope(abc.ABC): self._accesses_by_name[name].add(access) self._accesses_by_node[access.node].add(access) - def _is_visible_from_children(self) -> bool: + def _is_visible_from_children(self, from_scope: "Scope") -> bool: """Returns if the assignments in this scope can be accessed from children. This is normally True, except for class scopes:: @@ -459,9 +465,11 @@ class Scope(abc.ABC): """ return True - def _next_visible_parent(self, first: Optional["Scope"] = None) -> "Scope": + def _next_visible_parent( + self, from_scope: "Scope", first: Optional["Scope"] = None + ) -> "Scope": parent = first if first is not None else self.parent - while not parent._is_visible_from_children(): + while not parent._is_visible_from_children(from_scope): parent = parent.parent return parent @@ -470,7 +478,6 @@ class Scope(abc.ABC): """Check if the name str exist in current scope by ``name in scope``.""" ... - @abc.abstractmethod def __getitem__(self, name: str) -> Set[BaseAssignment]: """ Get assignments given a name str by ``scope[name]``. @@ -508,6 +515,12 @@ class Scope(abc.ABC): defined a given name by the time a piece of code is executed. For the above example, value would resolve to a set of both assignments. """ + return self._resolve_scope_for_access(name, self) + + @abc.abstractmethod + def _resolve_scope_for_access( + self, name: str, from_scope: "Scope" + ) -> Set[BaseAssignment]: ... 
def __hash__(self) -> int: @@ -612,7 +625,9 @@ class BuiltinScope(Scope): def __contains__(self, name: str) -> bool: return hasattr(builtins, name) - def __getitem__(self, name: str) -> Set[BaseAssignment]: + def _resolve_scope_for_access( + self, name: str, from_scope: "Scope" + ) -> Set[BaseAssignment]: if name in self._assignments: return self._assignments[name] if hasattr(builtins, name): @@ -644,13 +659,15 @@ class GlobalScope(Scope): def __contains__(self, name: str) -> bool: if name in self._assignments: return len(self._assignments[name]) > 0 - return name in self._next_visible_parent() + return name in self._next_visible_parent(self) - def __getitem__(self, name: str) -> Set[BaseAssignment]: + def _resolve_scope_for_access( + self, name: str, from_scope: "Scope" + ) -> Set[BaseAssignment]: if name in self._assignments: return self._assignments[name] - parent = self._next_visible_parent() + parent = self._next_visible_parent(from_scope) return parent[name] def record_global_overwrite(self, name: str) -> None: @@ -688,7 +705,7 @@ class LocalScope(Scope, abc.ABC): def _find_assignment_target(self, name: str) -> "Scope": if name in self._scope_overwrites: scope = self._scope_overwrites[name] - return self._next_visible_parent(scope)._find_assignment_target(name) + return self._next_visible_parent(self, scope)._find_assignment_target(name) else: return super()._find_assignment_target(name) @@ -697,16 +714,22 @@ class LocalScope(Scope, abc.ABC): return name in self._scope_overwrites[name] if name in self._assignments: return len(self._assignments[name]) > 0 - return name in self._next_visible_parent() + return name in self._next_visible_parent(self) - def __getitem__(self, name: str) -> Set[BaseAssignment]: + def _resolve_scope_for_access( + self, name: str, from_scope: "Scope" + ) -> Set[BaseAssignment]: if name in self._scope_overwrites: scope = self._scope_overwrites[name] - return self._next_visible_parent(scope)[name] + return self._next_visible_parent( + from_scope, scope + )._resolve_scope_for_access(name, from_scope) if name in self._assignments: return self._assignments[name] else: - return self._next_visible_parent()[name] + return self._next_visible_parent(from_scope)._resolve_scope_for_access( + name, from_scope + ) def _make_name_prefix(self) -> str: # filter falsey strings out @@ -728,8 +751,8 @@ class ClassScope(LocalScope): When a class is defined, it creates a ClassScope. """ - def _is_visible_from_children(self) -> bool: - return False + def _is_visible_from_children(self, from_scope: "Scope") -> bool: + return from_scope.parent is self and isinstance(from_scope, AnnotationScope) def _make_name_prefix(self) -> str: # filter falsey strings out @@ -755,6 +778,19 @@ class ComprehensionScope(LocalScope): return ".".join(filter(None, [self.parent._name_prefix, ""])) +class AnnotationScope(LocalScope): + """ + Scopes used for type aliases and type parameters as defined by PEP-695. + + These scopes are created for type parameters using the special syntax, as well as + type aliases. See https://peps.python.org/pep-0695/#scoping-behavior for more. 
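
The scoping behaviour described here can be exercised end to end through the provider. A minimal sketch mirroring the tests added in this patch (the alias name is arbitrary, and the PEP 695 syntax assumes the native parser):

```
import libcst as cst
from libcst import ensure_type
from libcst.metadata import MetadataWrapper, ScopeProvider

# PEP 695 syntax is only handled by the native parser.
wrapper = MetadataWrapper(cst.parse_module("type Alias[T] = list[T]\n"))
scopes = wrapper.resolve(ScopeProvider)

alias = ensure_type(
    ensure_type(wrapper.module.body[0], cst.SimpleStatementLine).body[0],
    cst.TypeAlias,
)

# The alias name itself is bound in the enclosing (here: global) scope...
print(type(scopes[alias]).__name__)        # GlobalScope
# ...while the right-hand side is evaluated in the implicit annotation scope,
# which is where the type parameter T is visible.
print(type(scopes[alias.value]).__name__)  # AnnotationScope
```
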
+ """ + + def _make_name_prefix(self) -> str: + # these scopes are transparent for the purposes of qualified names + return self.parent._name_prefix + + # Generates dotted names from an Attribute or Name node: # Attribute(value=Name(value="a"), attr=Name(value="b")) -> ("a.b", "a") # each string has the corresponding CSTNode attached to it @@ -822,6 +858,7 @@ class DeferredAccess: class ScopeVisitor(cst.CSTVisitor): # since it's probably not useful. That can makes this visitor cleaner. def __init__(self, provider: "ScopeProvider") -> None: + super().__init__() self.provider: ScopeProvider = provider self.scope: Scope = GlobalScope() self.__deferred_accesses: List[DeferredAccess] = [] @@ -992,15 +1029,22 @@ class ScopeVisitor(cst.CSTVisitor): self.scope.record_assignment(node.name.value, node) self.provider.set_metadata(node.name, self.scope) - with self._new_scope(FunctionScope, node, get_full_name_for_node(node.name)): - node.params.visit(self) - node.body.visit(self) + with ExitStack() as stack: + if node.type_parameters: + stack.enter_context(self._new_scope(AnnotationScope, node, None)) + node.type_parameters.visit(self) - for decorator in node.decorators: - decorator.visit(self) - returns = node.returns - if returns: - returns.visit(self) + with self._new_scope( + FunctionScope, node, get_full_name_for_node(node.name) + ): + node.params.visit(self) + node.body.visit(self) + + for decorator in node.decorators: + decorator.visit(self) + returns = node.returns + if returns: + returns.visit(self) return False @@ -1032,14 +1076,20 @@ class ScopeVisitor(cst.CSTVisitor): self.provider.set_metadata(node.name, self.scope) for decorator in node.decorators: decorator.visit(self) - for base in node.bases: - base.visit(self) - for keyword in node.keywords: - keyword.visit(self) - with self._new_scope(ClassScope, node, get_full_name_for_node(node.name)): - for statement in node.body.body: - statement.visit(self) + with ExitStack() as stack: + if node.type_parameters: + stack.enter_context(self._new_scope(AnnotationScope, node, None)) + node.type_parameters.visit(self) + + for base in node.bases: + base.visit(self) + for keyword in node.keywords: + keyword.visit(self) + + with self._new_scope(ClassScope, node, get_full_name_for_node(node.name)): + for statement in node.body.body: + statement.visit(self) return False def visit_ClassDef_bases(self, node: cst.ClassDef) -> None: @@ -1163,7 +1213,7 @@ class ScopeVisitor(cst.CSTVisitor): access.scope.record_access(name, access) for (scope, name), accesses in scope_name_accesses.items(): - for assignment in scope[name]: + for assignment in scope._resolve_scope_for_access(name, scope): assignment.record_accesses(accesses) self.__deferred_accesses = [] @@ -1174,6 +1224,32 @@ class ScopeVisitor(cst.CSTVisitor): self.scope._assignment_count += 1 super().on_leave(original_node) + def visit_TypeAlias(self, node: cst.TypeAlias) -> Optional[bool]: + self.scope.record_assignment(node.name.value, node) + + with self._new_scope(AnnotationScope, node, None): + if node.type_parameters is not None: + node.type_parameters.visit(self) + node.value.visit(self) + + return False + + def visit_TypeVar(self, node: cst.TypeVar) -> Optional[bool]: + self.scope.record_assignment(node.name.value, node) + + if node.bound is not None: + node.bound.visit(self) + + return False + + def visit_TypeVarTuple(self, node: cst.TypeVarTuple) -> Optional[bool]: + self.scope.record_assignment(node.name.value, node) + return False + + def visit_ParamSpec(self, node: cst.ParamSpec) -> 
Optional[bool]: + self.scope.record_assignment(node.name.value, node) + return False + class ScopeProvider(BatchableMetadataProvider[Optional[Scope]]): """ diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 9908cb4c..5f6d485b 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -11,9 +11,11 @@ from unittest import mock import libcst as cst from libcst import ensure_type +from libcst._parser.entrypoints import is_native from libcst.metadata import MetadataWrapper from libcst.metadata.scope_provider import ( _gen_dotted_names, + AnnotationScope, Assignment, BuiltinAssignment, BuiltinScope, @@ -1982,3 +1984,228 @@ class ScopeProviderTest(UnitTest): scope.get_qualified_names_for(cst.Name("something_else")), set(), ) + + def test_type_alias_scope(self) -> None: + if not is_native(): + self.skipTest("type aliases are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + type A = C + lol: A + """ + ) + alias = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.TypeAlias + ) + self.assertIsInstance(scopes[alias], GlobalScope) + a_assignments = list(scopes[alias]["A"]) + self.assertEqual(len(a_assignments), 1) + lol = ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.AnnAssign + ) + self.assertEqual(len(a_references := list(a_assignments[0].references)), 1) + self.assertEqual(a_references[0].node, lol.annotation.annotation) + + self.assertIsInstance(scopes[alias.value], AnnotationScope) + + def test_type_alias_param(self) -> None: + if not is_native(): + self.skipTest("type parameters are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + B = int + type A[T: B] = T + lol: T + """ + ) + alias = ensure_type( + ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.TypeAlias + ) + assert alias.type_parameters + param_scope = scopes[alias.type_parameters] + self.assertEqual(len(t_assignments := list(param_scope["T"])), 1) + self.assertEqual(len(t_refs := list(t_assignments[0].references)), 1) + self.assertEqual(t_refs[0].node, alias.value) + + b = ( + ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.Assign + ) + .targets[0] + .target + ) + b_assignment = list(scopes[b]["B"])[0] + self.assertEqual( + {ref.node for ref in b_assignment.references}, + {ensure_type(alias.type_parameters.params[0].param, cst.TypeVar).bound}, + ) + + def test_type_alias_tuple_and_paramspec(self) -> None: + if not is_native(): + self.skipTest("type parameters are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + type A[*T] = T + lol: T + type A[**T] = T + lol: T + """ + ) + alias_tuple = ensure_type( + ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.TypeAlias + ) + assert alias_tuple.type_parameters + param_scope = scopes[alias_tuple.type_parameters] + self.assertEqual(len(t_assignments := list(param_scope["T"])), 1) + self.assertEqual(len(t_refs := list(t_assignments[0].references)), 1) + self.assertEqual(t_refs[0].node, alias_tuple.value) + + alias_paramspec = ensure_type( + ensure_type(m.body[2], cst.SimpleStatementLine).body[0], cst.TypeAlias + ) + assert alias_paramspec.type_parameters + param_scope = scopes[alias_paramspec.type_parameters] + self.assertEqual(len(t_assignments := list(param_scope["T"])), 1) + self.assertEqual(len(t_refs := list(t_assignments[0].references)), 1) + 
self.assertEqual(t_refs[0].node, alias_paramspec.value) + + def test_class_type_params(self) -> None: + if not is_native(): + self.skipTest("type parameters are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + class W[T]: + def f() -> T: pass + def g[T]() -> T: pass + """ + ) + cls = ensure_type(m.body[0], cst.ClassDef) + cls_scope = scopes[cls.body.body[0]] + self.assertEqual(len(t_assignments_in_cls := list(cls_scope["T"])), 1) + assert cls.type_parameters + self.assertEqual( + ensure_type(t_assignments_in_cls[0], Assignment).node, + cls.type_parameters.params[0].param, + ) + self.assertEqual( + len(t_refs_in_cls := list(t_assignments_in_cls[0].references)), 1 + ) + f = ensure_type(cls.body.body[0], cst.FunctionDef) + assert f.returns + self.assertEqual(t_refs_in_cls[0].node, f.returns.annotation) + + g = ensure_type(cls.body.body[1], cst.FunctionDef) + assert g.type_parameters + assert g.returns + self.assertEqual(len(t_assignments_in_g := list(scopes[g.body]["T"])), 1) + self.assertEqual( + ensure_type(t_assignments_in_g[0], Assignment).node, + g.type_parameters.params[0].param, + ) + self.assertEqual(len(t_refs_in_g := list(t_assignments_in_g[0].references)), 1) + self.assertEqual(t_refs_in_g[0].node, g.returns.annotation) + + def test_nested_class_type_params(self) -> None: + if not is_native(): + self.skipTest("type parameters are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + class Outer: + class Nested[T: Outer]: pass + """ + ) + outer = ensure_type(m.body[0], cst.ClassDef) + outer_refs = list(list(scopes[outer]["Outer"])[0].references) + self.assertEqual(len(outer_refs), 1) + inner = ensure_type(outer.body.body[0], cst.ClassDef) + assert inner.type_parameters + self.assertEqual( + outer_refs[0].node, + ensure_type(inner.type_parameters.params[0].param, cst.TypeVar).bound, + ) + + def test_annotation_refers_to_nested_class(self) -> None: + if not is_native(): + self.skipTest("type parameters are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + class Outer: + class Nested: + pass + + type Alias = Nested + + def meth1[T: Nested](self): pass + def meth2[T](self, arg: Nested): pass + """ + ) + outer = ensure_type(m.body[0], cst.ClassDef) + nested = ensure_type(outer.body.body[0], cst.ClassDef) + alias = ensure_type( + ensure_type(outer.body.body[1], cst.SimpleStatementLine).body[0], + cst.TypeAlias, + ) + self.assertIsInstance(scopes[alias.value], AnnotationScope) + nested_refs_within_alias = list(scopes[alias.value].accesses["Nested"]) + self.assertEqual(len(nested_refs_within_alias), 1) + self.assertEqual( + { + ensure_type(ref, Assignment).node + for ref in nested_refs_within_alias[0].referents + }, + {nested}, + ) + + meth1 = ensure_type(outer.body.body[2], cst.FunctionDef) + self.assertIsInstance(scopes[meth1], ClassScope) + assert meth1.type_parameters + meth1_typevar = ensure_type(meth1.type_parameters.params[0].param, cst.TypeVar) + meth1_typevar_scope = scopes[meth1_typevar] + self.assertIsInstance(meth1_typevar_scope, AnnotationScope) + nested_refs_within_meth1 = list(meth1_typevar_scope.accesses["Nested"]) + self.assertEqual(len(nested_refs_within_meth1), 1) + self.assertEqual( + { + ensure_type(ref, Assignment).node + for ref in nested_refs_within_meth1[0].referents + }, + {nested}, + ) + + meth2 = ensure_type(outer.body.body[3], cst.FunctionDef) + meth2_annotation = meth2.params.params[1].annotation + assert meth2_annotation + nested_refs_within_meth2 
= list(scopes[meth2_annotation].accesses["Nested"]) + self.assertEqual(len(nested_refs_within_meth2), 1) + self.assertEqual( + { + ensure_type(ref, Assignment).node + for ref in nested_refs_within_meth2[0].referents + }, + {nested}, + ) + + def test_body_isnt_subject_to_special_annotation_rule(self) -> None: + if not is_native(): + self.skipTest("type parameters are only supported in the native parser") + m, scopes = get_scope_metadata_provider( + """ + class Outer: + class Inner: pass + def f[T: Inner](self): Inner + """ + ) + outer = ensure_type(m.body[0], cst.ClassDef) + # note: this is different from global scope + outer_scope = scopes[outer.body.body[0]] + inner_assignment = list(outer_scope["Inner"])[0] + self.assertEqual(len(inner_assignment.references), 1) + f = ensure_type(outer.body.body[1], cst.FunctionDef) + assert f.type_parameters + T = ensure_type(f.type_parameters.params[0].param, cst.TypeVar) + self.assertIs(list(inner_assignment.references)[0].node, T.bound) + + inner_in_func_body = ensure_type(f.body.body[0], cst.Expr) + f_scope = scopes[inner_in_func_body] + self.assertIn(inner_in_func_body.value, f_scope.accesses) + self.assertEqual(list(f_scope.accesses)[0].referents, set()) From f81cc8d00ec056bca3748ed229d39f51fa2d349b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20C=2E=20Silva?= <12188364+andrecsilva@users.noreply.github.com> Date: Sun, 1 Oct 2023 10:38:33 -0300 Subject: [PATCH 412/632] AddImportsVisitor: add imports before the first non-import statement (#1024) * AddImportsVisitor will now only add at the top of module - Also added new tests to cover these cases * Fixed an issue with from imports * Added a couple tests for AddImportsVisitor * Refactoring of GatherImportsVisitor * Refactors, typos and typing changes --- libcst/codemod/visitors/_add_imports.py | 120 +++++++++++++----- libcst/codemod/visitors/_gather_imports.py | 105 ++++++++------- .../visitors/tests/test_add_imports.py | 103 +++++++++++++++ 3 files changed, 250 insertions(+), 78 deletions(-) diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index 8081adf9..f734af5c 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -8,11 +8,51 @@ from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst from libcst import matchers as m, parse_statement +from libcst._nodes.statement import Import, ImportFrom, SimpleStatementLine from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer -from libcst.codemod.visitors._gather_imports import GatherImportsVisitor +from libcst.codemod.visitors._gather_imports import _GatherImportsMixin from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_absolute_module_from_package_for_import +from libcst.helpers.common import ensure_type + + +class _GatherTopImportsBeforeStatements(_GatherImportsMixin): + """ + Works similarly to GatherImportsVisitor, but only considers imports + declared before any other statements of the module with the exception + of docstrings and __strict__ flag. 
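
The resulting behaviour is easiest to see from the visitor's public entry point. A rough usage sketch based on the new `test_add_at_first_block` case (the sample module is invented for illustration; `AddImportsVisitor`, `ImportItem` and `CodemodContext` are the existing public classes the tests use):

```
import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.visitors import AddImportsVisitor, ImportItem

source = (
    "import a\n"
    "import b\n"
    "\n"
    "e()\n"
    "\n"
    "import c\n"
    "import d\n"
)

# Ask for `import e`: it is appended to the *first* import block, before the
# first non-import statement, rather than after the later imports.
visitor = AddImportsVisitor(CodemodContext(), [ImportItem("e", None, None)])
print(visitor.transform_module(cst.parse_module(source)).code)
# import a
# import b
# import e
#
# e()
#
# import c
# import d
```
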
+ """ + + def __init__(self, context: CodemodContext) -> None: + super().__init__(context) + # Track all of the imports found in this transform + self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] + + def leave_Module(self, original_node: libcst.Module) -> None: + start = 1 if _skip_first(original_node) else 0 + for stmt in original_node.body[start:]: + if m.matches( + stmt, + m.SimpleStatementLine(body=[m.ImportFrom() | m.Import()]), + ): + stmt = ensure_type(stmt, SimpleStatementLine) + # Workaround for python 3.8 and 3.9, won't accept Union for isinstance + if m.matches(stmt.body[0], m.ImportFrom()): + imp = ensure_type(stmt.body[0], ImportFrom) + self.all_imports.append(imp) + if m.matches(stmt.body[0], m.Import()): + imp = ensure_type(stmt.body[0], Import) + self.all_imports.append(imp) + else: + break + for imp in self.all_imports: + if m.matches(imp, m.Import()): + imp = ensure_type(imp, Import) + self._handle_Import(imp) + else: + imp = ensure_type(imp, ImportFrom) + self._handle_ImportFrom(imp) class AddImportsVisitor(ContextAwareTransformer): @@ -169,12 +209,12 @@ class AddImportsVisitor(ContextAwareTransformer): for module in sorted(from_imports_aliases) } - # Track the list of imports found in the file + # Track the list of imports found at the top of the file self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] def visit_Module(self, node: libcst.Module) -> None: - # Do a preliminary pass to gather the imports we already have - gatherer = GatherImportsVisitor(self.context) + # Do a preliminary pass to gather the imports we already have at the top + gatherer = _GatherTopImportsBeforeStatements(self.context) node.visit(gatherer) self.all_imports = gatherer.all_imports @@ -213,6 +253,10 @@ class AddImportsVisitor(ContextAwareTransformer): # There's nothing to do here! return updated_node + # Ensure this is one of the imports at the top + if original_node not in self.all_imports: + return updated_node + # Get the module we're importing as a string, see if we have work to do. module = get_absolute_module_from_package_for_import( self.context.full_package_name, updated_node @@ -260,39 +304,26 @@ class AddImportsVisitor(ContextAwareTransformer): statement_before_import_location = 0 import_add_location = 0 - # never insert an import before initial __strict__ flag - if m.matches( - orig_module, - m.Module( - body=[ - m.SimpleStatementLine( - body=[ - m.Assign( - targets=[m.AssignTarget(target=m.Name("__strict__"))] - ) - ] - ), - m.ZeroOrMore(), - ] - ), - ): - statement_before_import_location = import_add_location = 1 - # This works under the principle that while we might modify node contents, # we have yet to modify the number of statements. So we can match on the # original tree but break up the statements of the modified tree. If we # change this assumption in this visitor, we will have to change this code. - for i, statement in enumerate(orig_module.body): - if i == 0 and m.matches( - statement, m.SimpleStatementLine(body=[m.Expr(value=m.SimpleString())]) + + # Finds the location to add imports. 
It is the end of the first import block that occurs before any other statement (save for docstrings) + + # Never insert an import before initial __strict__ flag or docstring + if _skip_first(orig_module): + statement_before_import_location = import_add_location = 1 + + for i, statement in enumerate( + orig_module.body[statement_before_import_location:] + ): + if m.matches( + statement, m.SimpleStatementLine(body=[m.ImportFrom() | m.Import()]) ): - statement_before_import_location = import_add_location = 1 - elif isinstance(statement, libcst.SimpleStatementLine): - for possible_import in statement.body: - for last_import in self.all_imports: - if possible_import is last_import: - import_add_location = i + 1 - break + import_add_location = i + statement_before_import_location + 1 + else: + break return ( list(updated_module.body[:statement_before_import_location]), @@ -414,3 +445,28 @@ class AddImportsVisitor(ContextAwareTransformer): *statements_after_imports, ) ) + + +def _skip_first(orig_module: libcst.Module) -> bool: + # Is there a __strict__ flag or docstring at the top? + if m.matches( + orig_module, + m.Module( + body=[ + m.SimpleStatementLine( + body=[ + m.Assign(targets=[m.AssignTarget(target=m.Name("__strict__"))]) + ] + ), + m.ZeroOrMore(), + ] + ) + | m.Module( + body=[ + m.SimpleStatementLine(body=[m.Expr(value=m.SimpleString())]), + m.ZeroOrMore(), + ] + ), + ): + return True + return False diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index 4847afc1..6b187c53 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -12,43 +12,9 @@ from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_absolute_module_from_package_for_import -class GatherImportsVisitor(ContextAwareVisitor): +class _GatherImportsMixin(ContextAwareVisitor): """ - Gathers all imports in a module and stores them as attributes on the instance. - Intended to be instantiated and passed to a :class:`~libcst.Module` - :meth:`~libcst.CSTNode.visit` method in order to gather up information about - imports on a module. Note that this is not a substitute for scope analysis or - qualified name support. Please see :ref:`libcst-scope-tutorial` for a more - robust way of determining the qualified name and definition for an arbitrary - node. - - After visiting a module the following attributes will be populated: - - module_imports - A sequence of strings representing modules that were imported directly, such as - in the case of ``import typing``. Each module directly imported but not aliased - will be included here. - object_mapping - A mapping of strings to sequences of strings representing modules where we - imported objects from, such as in the case of ``from typing import Optional``. - Each from import that was not aliased will be included here, where the keys of - the mapping are the module we are importing from, and the value is a - sequence of objects we are importing from the module. - module_aliases - A mapping of strings representing modules that were imported and aliased, - such as in the case of ``import typing as t``. Each module imported this - way will be represented as a key in this mapping, and the value will be - the local alias of the module. - alias_mapping - A mapping of strings to sequences of tuples representing modules where we - imported objects from and aliased using ``as`` syntax, such as in the case - of ``from typing import Optional as opt``. 
Each from import that was aliased - will be included here, where the keys of the mapping are the module we are - importing from, and the value is a tuple representing the original object - name and the alias. - all_imports - A collection of all :class:`~libcst.Import` and :class:`~libcst.ImportFrom` - statements that were encountered in the module. + A Mixin class for tracking visited imports. """ def __init__(self, context: CodemodContext) -> None: @@ -59,15 +25,10 @@ class GatherImportsVisitor(ContextAwareVisitor): # Track the aliased imports in this transform self.module_aliases: Dict[str, str] = {} self.alias_mapping: Dict[str, List[Tuple[str, str]]] = {} - # Track all of the imports found in this transform - self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] # Track the import for every symbol introduced into the module self.symbol_mapping: Dict[str, ImportItem] = {} - def visit_Import(self, node: libcst.Import) -> None: - # Track this import statement for later analysis. - self.all_imports.append(node) - + def _handle_Import(self, node: libcst.Import) -> None: for name in node.names: alias = name.evaluated_alias imp = ImportItem(name.evaluated_name, alias=alias) @@ -80,10 +41,7 @@ class GatherImportsVisitor(ContextAwareVisitor): self.module_imports.add(name.evaluated_name) self.symbol_mapping[name.evaluated_name] = imp - def visit_ImportFrom(self, node: libcst.ImportFrom) -> None: - # Track this import statement for later analysis. - self.all_imports.append(node) - + def _handle_ImportFrom(self, node: libcst.ImportFrom) -> None: # Get the module we're importing as a string. module = get_absolute_module_from_package_for_import( self.context.full_package_name, node @@ -128,3 +86,58 @@ class GatherImportsVisitor(ContextAwareVisitor): ) key = ia.evaluated_alias or ia.evaluated_name self.symbol_mapping[key] = imp + + +class GatherImportsVisitor(_GatherImportsMixin): + """ + Gathers all imports in a module and stores them as attributes on the instance. + Intended to be instantiated and passed to a :class:`~libcst.Module` + :meth:`~libcst.CSTNode.visit` method in order to gather up information about + imports on a module. Note that this is not a substitute for scope analysis or + qualified name support. Please see :ref:`libcst-scope-tutorial` for a more + robust way of determining the qualified name and definition for an arbitrary + node. + + After visiting a module the following attributes will be populated: + + module_imports + A sequence of strings representing modules that were imported directly, such as + in the case of ``import typing``. Each module directly imported but not aliased + will be included here. + object_mapping + A mapping of strings to sequences of strings representing modules where we + imported objects from, such as in the case of ``from typing import Optional``. + Each from import that was not aliased will be included here, where the keys of + the mapping are the module we are importing from, and the value is a + sequence of objects we are importing from the module. + module_aliases + A mapping of strings representing modules that were imported and aliased, + such as in the case of ``import typing as t``. Each module imported this + way will be represented as a key in this mapping, and the value will be + the local alias of the module. + alias_mapping + A mapping of strings to sequences of tuples representing modules where we + imported objects from and aliased using ``as`` syntax, such as in the case + of ``from typing import Optional as opt``. 
Each from import that was aliased + will be included here, where the keys of the mapping are the module we are + importing from, and the value is a tuple representing the original object + name and the alias. + all_imports + A collection of all :class:`~libcst.Import` and :class:`~libcst.ImportFrom` + statements that were encountered in the module. + """ + + def __init__(self, context: CodemodContext) -> None: + super().__init__(context) + # Track all of the imports found in this transform + self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] + + def visit_Import(self, node: libcst.Import) -> None: + # Track this import statement for later analysis. + self.all_imports.append(node) + self._handle_Import(node) + + def visit_ImportFrom(self, node: libcst.ImportFrom) -> None: + # Track this import statement for later analysis. + self.all_imports.append(node) + self._handle_ImportFrom(node) diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 0682fa51..613da907 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -923,3 +923,106 @@ class TestAddImportsCodemod(CodemodTest): full_module_name="a.b.foobar", full_package_name="a.b" ), ) + + def test_add_at_first_block(self) -> None: + """ + Should add the import only at the end of the first import block. + """ + + before = """ + import a + import b + + e() + + import c + import d + """ + + after = """ + import a + import b + import e + + e() + + import c + import d + """ + + self.assertCodemod(before, after, [ImportItem("e", None, None)]) + + def test_add_no_import_block_before_statement(self) -> None: + """ + Should add the import before the call. + """ + + before = """ + '''docstring''' + e() + import a + import b + """ + + after = """ + '''docstring''' + import c + + e() + import a + import b + """ + + self.assertCodemod(before, after, [ImportItem("c", None, None)]) + + def test_do_not_add_existing(self) -> None: + """ + Should not add the new object import at existing import since it's not at the top + """ + + before = """ + '''docstring''' + e() + import a + import b + from c import f + """ + + after = """ + '''docstring''' + from c import e + + e() + import a + import b + from c import f + """ + + self.assertCodemod(before, after, [ImportItem("c", "e", None)]) + + def test_add_existing_at_top(self) -> None: + """ + Should add new import at exisitng from import at top + """ + + before = """ + '''docstring''' + from c import d + e() + import a + import b + from c import f + """ + + after = """ + '''docstring''' + from c import e, x, d + e() + import a + import b + from c import f + """ + + self.assertCodemod( + before, after, [ImportItem("c", "x", None), ImportItem("c", "e", None)] + ) From e1da64b53e3941d3756607c41cc4a0b1cafc73f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Oct 2023 14:40:48 +0100 Subject: [PATCH 413/632] Bump ufmt from 2.1.0 to 2.2.0 (#1005) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.1.0 to 2.2.0. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.1.0...v2.2.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8dde45c5..ced62552 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ dev = [ "pyre-check==0.9.10; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.1.0", + "ufmt==2.2.0", "usort==1.0.7", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From 552af63d2923d390c678d9d1ec2123e21e7f21a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20C=2E=20Silva?= <12188364+andrecsilva@users.noreply.github.com> Date: Sun, 1 Oct 2023 14:34:42 -0300 Subject: [PATCH 414/632] ScopeProvider: Record Access for Attributes and Decorators (#1019) * Support for Attributes and Decorators in _NameUtil * Replaced _NameUtil with get_full_name_for_node * Added tests --- libcst/metadata/scope_provider.py | 21 +---------- libcst/metadata/tests/test_scope_provider.py | 39 ++++++++++++++++++++ 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 73bb61f5..75f37a06 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -330,7 +330,7 @@ class Assignments: def __getitem__(self, node: Union[str, cst.CSTNode]) -> Collection[BaseAssignment]: """Get assignments given a name str or :class:`~libcst.CSTNode` by ``scope.assignments[node]``""" - name = _NameUtil.get_name_for(node) + name = get_full_name_for_node(node) return set(self._assignments[name]) if name in self._assignments else set() def __contains__(self, node: Union[str, cst.CSTNode]) -> bool: @@ -352,7 +352,7 @@ class Accesses: def __getitem__(self, node: Union[str, cst.CSTNode]) -> Collection[Access]: """Get accesses given a name str or :class:`~libcst.CSTNode` by ``scope.accesses[node]``""" - name = _NameUtil.get_name_for(node) + name = get_full_name_for_node(node) return self._accesses[name] if name in self._accesses else set() def __contains__(self, node: Union[str, cst.CSTNode]) -> bool: @@ -360,23 +360,6 @@ class Accesses: return len(self[node]) > 0 -class _NameUtil: - @staticmethod - def get_name_for(node: Union[str, cst.CSTNode]) -> Optional[str]: - """A helper function to retrieve simple name str from a CSTNode or str""" - if isinstance(node, cst.Name): - return node.value - elif isinstance(node, str): - return node - elif isinstance(node, cst.Call): - return _NameUtil.get_name_for(node.func) - elif isinstance(node, cst.Subscript): - return _NameUtil.get_name_for(node.value) - elif isinstance(node, (cst.FunctionDef, cst.ClassDef)): - return _NameUtil.get_name_for(node.name) - return None - - class Scope(abc.ABC): """ Base class of all scope classes. 
Scope object stores assignments from imports, diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index 5f6d485b..a2087645 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -253,6 +253,45 @@ class ScopeProviderTest(UnitTest): self.assertEqual(list(scope_of_module["x.y"])[0].references, set()) self.assertEqual(scope_of_module.accesses["x.y"], set()) + def test_dotted_import_access_reference_by_node(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + import a.b.c + a.b.c() + """ + ) + scope_of_module = scopes[m] + first_statement = ensure_type(m.body[1], cst.SimpleStatementLine) + call = ensure_type( + ensure_type(first_statement.body[0], cst.Expr).value, cst.Call + ) + + a_b_c_assignment = cast(ImportAssignment, list(scope_of_module["a.b.c"])[0]) + a_b_c_access = list(a_b_c_assignment.references)[0] + self.assertEqual(scope_of_module.accesses[call], {a_b_c_access}) + self.assertEqual(a_b_c_access.node, call.func) + + def test_decorator_access_reference_by_node(self) -> None: + m, scopes = get_scope_metadata_provider( + """ + import decorator + + @decorator + def f(): + pass + """ + ) + scope_of_module = scopes[m] + function_def = ensure_type(m.body[1], cst.FunctionDef) + decorator = function_def.decorators[0] + self.assertTrue("decorator" in scope_of_module) + + decorator_assignment = cast( + ImportAssignment, list(scope_of_module["decorator"])[0] + ) + decorator_access = list(decorator_assignment.references)[0] + self.assertEqual(scope_of_module.accesses[decorator], {decorator_access}) + def test_dotted_import_with_call_access(self) -> None: m, scopes = get_scope_metadata_provider( """ From 03179b55ebe7e916f1722e18e8f0b87c01616d1f Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 1 Oct 2023 20:58:40 +0100 Subject: [PATCH 415/632] Parse arbitrarily nested f-strings (#1026) --- native/libcst/src/tokenizer/core/mod.rs | 23 ++++++------------- native/libcst/src/tokenizer/tests.rs | 22 ++++++++++++++++++ native/libcst/tests/fixtures/super_strings.py | 9 +++++++- 3 files changed, 37 insertions(+), 17 deletions(-) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 2365eaa3..067c1cf9 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -907,8 +907,8 @@ impl<'t> TokState<'t> { return Err(TokError::UnterminatedString); } (ch @ Some('\''), _) | (ch @ Some('"'), _) => { - // see if this actually terminates something in fstring_stack - for node in self.fstring_stack.iter() { + // see if this actually terminates the most recent fstring + if let Some(node) = self.fstring_stack.last() { if ch == Some(node.quote_char.into()) { match node.quote_size { StringQuoteSize::Single => { @@ -1004,27 +1004,18 @@ impl<'t> TokState<'t> { fn maybe_consume_fstring_end(&mut self) -> Option { let ch = self.text_pos.peek(); - let mut match_idx = None; - for (idx, node) in self.fstring_stack.iter().enumerate() { + if let Some(node) = self.fstring_stack.last() { if ch == Some(node.quote_char.into()) { if node.quote_size == StringQuoteSize::Triple { - if self.text_pos.consume(node.quote_char.triple_str()) { - match_idx = Some(idx); - break; - } + self.text_pos.consume(node.quote_char.triple_str()); } else { self.text_pos.next(); // already matched - match_idx = Some(idx); - break; } + self.fstring_stack.pop(); + return Some(TokType::FStringEnd); } } - if let Some(match_idx) = match_idx { - 
self.fstring_stack.truncate(match_idx); - Some(TokType::FStringEnd) - } else { - None - } + None } } diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index a24b977b..af79971d 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -853,3 +853,25 @@ fn test_nested_f_string_specs() { ]) ) } + +#[test] +fn test_nested_f_strings() { + let config = TokConfig { + split_fstring: true, + ..default_config() + }; + assert_eq!( + tokenize_all("f'{f'{2}'}'", &config), + Ok(vec![ + (TokType::FStringStart, "f'"), + (TokType::Op, "{"), + (TokType::FStringStart, "f'"), + (TokType::Op, "{"), + (TokType::Number, "2"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'") + ]) + ) +} diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py index 82457279..c1471a45 100644 --- a/native/libcst/tests/fixtures/super_strings.py +++ b/native/libcst/tests/fixtures/super_strings.py @@ -31,4 +31,11 @@ f'\{{\}}' f"regexp_like(path, '.*\{file_type}$')" f"\lfoo" -f"{_:{_:}{a}}" \ No newline at end of file +f"{_:{_:}{a}}" + +f"foo {f"bar {x}"} baz" +f'some words {a+b:.3f} more words {c+d=} final words' +f"{'':*^{1:{1}}}" +f"{'':*^{1:{1:{1}}}}" +f"{f"{f"{f"{f"{f"{1+1}"}"}"}"}"}" + From 74e8a0e7c063539756ee0d2b6f52019eaea693c2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Oct 2023 08:03:49 -0700 Subject: [PATCH 416/632] Bump pypa/cibuildwheel from 2.16.0 to 2.16.1 (#1029) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.0 to 2.16.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.0...v2.16.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4b466a03..431074ab 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.16.0 + uses: pypa/cibuildwheel@v2.16.1 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From face393db016c56ca375c743d92eab63387fc19f Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 2 Oct 2023 08:05:33 -0700 Subject: [PATCH 417/632] eliminate relative paths from Cargo.toml (#1031) * eliminate relative paths from Cargo.toml * fix paths in LICENSE files --- LICENSE | 4 +- native/libcst/Cargo.toml | 2 +- native/libcst/LICENSE | 102 ++++++++++++++++++++++++++++++++ native/libcst_derive/Cargo.toml | 2 +- native/libcst_derive/LICENSE | 102 ++++++++++++++++++++++++++++++++ 5 files changed, 208 insertions(+), 4 deletions(-) create mode 100644 native/libcst/LICENSE create mode 100644 native/libcst_derive/LICENSE diff --git a/LICENSE b/LICENSE index ad52f20b..5594616f 100644 --- a/LICENSE +++ b/LICENSE @@ -13,8 +13,8 @@ PSF). 
These files are: - libcst/_parser/parso/tests/test_fstring.py - libcst/_parser/parso/tests/test_tokenize.py - libcst/_parser/parso/tests/test_utils.py -- libcst_native/src/tokenize/core/mod.rs -- libcst_native/src/tokenize/core/string_types.rs +- native/libcst/src/tokenizer/core/mod.rs +- native/libcst/src/tokenizer/core/string_types.rs Some Python files have been taken from dataclasses and are therefore Apache licensed. Modifications on these files are licensed under Apache 2.0 license. diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index b52b9015..aa89f492 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -10,7 +10,7 @@ authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" description = "A Python parser and Concrete Syntax Tree library." -license-file = "../../LICENSE" +license-file = "LICENSE" homepage = "https://github.com/Instagram/LibCST" documentation = "https://libcst.rtfd.org" keywords = ["python", "cst", "ast"] diff --git a/native/libcst/LICENSE b/native/libcst/LICENSE new file mode 100644 index 00000000..5594616f --- /dev/null +++ b/native/libcst/LICENSE @@ -0,0 +1,102 @@ +All contributions towards LibCST are MIT licensed. + +Some Python files have been derived from the standard library and are therefore +PSF licensed. Modifications on these files are dual licensed (both MIT and +PSF). These files are: + +- libcst/_parser/base_parser.py +- libcst/_parser/parso/utils.py +- libcst/_parser/parso/pgen2/generator.py +- libcst/_parser/parso/pgen2/grammar_parser.py +- libcst/_parser/parso/python/py_token.py +- libcst/_parser/parso/python/tokenize.py +- libcst/_parser/parso/tests/test_fstring.py +- libcst/_parser/parso/tests/test_tokenize.py +- libcst/_parser/parso/tests/test_utils.py +- native/libcst/src/tokenizer/core/mod.rs +- native/libcst/src/tokenizer/core/string_types.rs + +Some Python files have been taken from dataclasses and are therefore Apache +licensed. Modifications on these files are licensed under Apache 2.0 license. +These files are: + +- libcst/_add_slots.py + +------------------------------------------------------------------------------- + +MIT License + +Copyright (c) Meta Platforms, Inc. and affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------------------------------------------------------------------------------- + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + +------------------------------------------------------------------------------- + +APACHE LICENSE, VERSION 2.0 + +http://www.apache.org/licenses/LICENSE-2.0 diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index d3453032..2b5c61a3 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -3,7 +3,7 @@ name = "libcst_derive" version = "0.1.0" edition = "2018" description = "Proc macro helpers for libcst." -license-file = "../../LICENSE" +license-file = "LICENSE" homepage = "https://github.com/Instagram/LibCST" documentation = "https://libcst.rtfd.org" keywords = ["macros", "python"] diff --git a/native/libcst_derive/LICENSE b/native/libcst_derive/LICENSE new file mode 100644 index 00000000..5594616f --- /dev/null +++ b/native/libcst_derive/LICENSE @@ -0,0 +1,102 @@ +All contributions towards LibCST are MIT licensed. + +Some Python files have been derived from the standard library and are therefore +PSF licensed. Modifications on these files are dual licensed (both MIT and +PSF). 
These files are: + +- libcst/_parser/base_parser.py +- libcst/_parser/parso/utils.py +- libcst/_parser/parso/pgen2/generator.py +- libcst/_parser/parso/pgen2/grammar_parser.py +- libcst/_parser/parso/python/py_token.py +- libcst/_parser/parso/python/tokenize.py +- libcst/_parser/parso/tests/test_fstring.py +- libcst/_parser/parso/tests/test_tokenize.py +- libcst/_parser/parso/tests/test_utils.py +- native/libcst/src/tokenizer/core/mod.rs +- native/libcst/src/tokenizer/core/string_types.rs + +Some Python files have been taken from dataclasses and are therefore Apache +licensed. Modifications on these files are licensed under Apache 2.0 license. +These files are: + +- libcst/_add_slots.py + +------------------------------------------------------------------------------- + +MIT License + +Copyright (c) Meta Platforms, Inc. and affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------------------------------------------------------------------------------- + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + +------------------------------------------------------------------------------- + +APACHE LICENSE, VERSION 2.0 + +http://www.apache.org/licenses/LICENSE-2.0 From 738dc2f893da69150d71e964831e7b6e594b86fb Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 2 Oct 2023 09:43:17 -0700 Subject: [PATCH 418/632] Upgrade pyre (#1032) * Upgrade pyre * regen fixtures --- libcst/_nodes/module.py | 1 - .../codemod/commands/convert_type_comments.py | 6 +- libcst/matchers/_matcher_base.py | 61 ++++--------------- libcst/matchers/_visitors.py | 5 -- libcst/tests/pyre/simple_class.json | 30 +-------- libcst/tool.py | 4 ++ pyproject.toml | 2 +- 7 files changed, 20 insertions(+), 89 deletions(-) diff --git a/libcst/_nodes/module.py b/libcst/_nodes/module.py index 149a4375..9ed45716 100644 --- a/libcst/_nodes/module.py +++ b/libcst/_nodes/module.py @@ -79,7 +79,6 @@ class Module(CSTNode): has_trailing_newline=self.has_trailing_newline, ) - # pyre-fixme[14]: `visit` overrides method defined in `CSTNode` inconsistently. def visit(self: _ModuleSelfT, visitor: CSTVisitorT) -> _ModuleSelfT: """ Returns the result of running a visitor over this module. diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 8335160e..e2c6e71c 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -52,7 +52,6 @@ def _parse_type_comment( if type_comment is None: return None try: - # pyre-ignore[16]: the ast module stubs do not have full details return ast.parse(type_comment, "", "eval").body except SyntaxError: return None @@ -69,10 +68,7 @@ def _parse_func_type_comment( ) -> Optional["ast.FunctionType"]: if func_type_comment is None: return None - return cast( - ast.FunctionType, - ast.parse(func_type_comment, "", "func_type"), - ) + return ast.parse(func_type_comment, "", "func_type") @functools.lru_cache() diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 666ceab0..039694a5 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -69,7 +69,6 @@ class AbstractBaseMatcherNodeMeta(ABCMeta): matcher. """ - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. 
def __or__(self, node: Type["BaseMatcherNode"]) -> "TypeOf[Type[BaseMatcherNode]]": return TypeOf(self, node) @@ -84,25 +83,16 @@ class BaseMatcherNode: several concrete matchers as options. """ - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self: _BaseMatcherNodeSelfT, other: _OtherNodeT ) -> "OneOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]]": - # Without a cast, pyre thinks that the below OneOf is type OneOf[object] - # even though it has the types passed into it. - return cast( - OneOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]], OneOf(self, other) - ) + return OneOf(self, other) def __and__( self: _BaseMatcherNodeSelfT, other: _OtherNodeT ) -> "AllOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]]": - # Without a cast, pyre thinks that the below AllOf is type AllOf[object] - # even though it has the types passed into it. - return cast( - AllOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]], AllOf(self, other) - ) + return AllOf(self, other) def __invert__(self: _BaseMatcherNodeSelfT) -> "_BaseMatcherNodeSelfT": return cast(_BaseMatcherNodeSelfT, _InverseOf(self)) @@ -180,7 +170,6 @@ class TypeOf(Generic[_MatcherTypeT], BaseMatcherNode): self._call_items = (args, kwargs) return self - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeMatcherTypeT @@ -240,19 +229,16 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): """ return self._options - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": - # Without a cast, pyre thinks that the below OneOf is type OneOf[object] - # even though it has the types passed into it. - return cast(OneOf[Union[_MatcherT, _OtherNodeT]], OneOf(self, other)) + return OneOf(self, other) def __and__(self, other: _OtherNodeT) -> NoReturn: raise Exception("Cannot use AllOf and OneOf in combination!") def __invert__(self) -> "AllOf[_MatcherT]": # Invert using De Morgan's Law so we don't have to complicate types. - return cast(AllOf[_MatcherT], AllOf(*[DoesNotMatch(m) for m in self._options])) + return AllOf(*[DoesNotMatch(m) for m in self._options]) def __repr__(self) -> str: return f"OneOf({', '.join([repr(o) for o in self._options])})" @@ -318,19 +304,16 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): """ return self._options - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> NoReturn: raise Exception("Cannot use AllOf and OneOf in combination!") def __and__(self, other: _OtherNodeT) -> "AllOf[Union[_MatcherT, _OtherNodeT]]": - # Without a cast, pyre thinks that the below AllOf is type AllOf[object] - # even though it has the types passed into it. - return cast(AllOf[Union[_MatcherT, _OtherNodeT]], AllOf(self, other)) + return AllOf(self, other) def __invert__(self) -> "OneOf[_MatcherT]": # Invert using De Morgan's Law so we don't have to complicate types. 
- return cast(OneOf[_MatcherT], OneOf(*[DoesNotMatch(m) for m in self._options])) + return OneOf(*[DoesNotMatch(m) for m in self._options]) def __repr__(self) -> str: return f"AllOf({', '.join([repr(o) for o in self._options])})" @@ -367,7 +350,6 @@ class _InverseOf(Generic[_MatcherT]): """ return self._matcher - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] @@ -438,7 +420,6 @@ class _ExtractMatchingNode(Generic[_MatcherT]): """ return self._name - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] @@ -512,25 +493,16 @@ class MatchIfTrue(Generic[_MatchIfTrueT]): """ return self._func - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeT ) -> "OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": - # Without a cast, pyre thinks that the below OneOf is type OneOf[object] - # even though it has the types passed into it. - return cast( - OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]], OneOf(self, other) - ) + return OneOf(self, other) def __and__( self, other: _OtherNodeT ) -> "AllOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": - # Without a cast, pyre thinks that the below AllOf is type AllOf[object] - # even though it has the types passed into it. - return cast( - AllOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]], AllOf(self, other) - ) + return AllOf(self, other) def __invert__(self) -> "MatchIfTrue[_MatchIfTrueT]": # Construct a wrapped version of MatchIfTrue for typing simplicity. @@ -560,7 +532,6 @@ def MatchRegex(regex: Union[str, Pattern[str]]) -> MatchIfTrue[str]: def _match_func(value: object) -> bool: if isinstance(value, str): - # pyre-ignore Pyre doesn't think a 'Pattern' can be passed to fullmatch. return bool(re.fullmatch(regex, value)) else: return False @@ -642,15 +613,12 @@ class MatchMetadata(_BaseMetadataMatcher): """ return self._value - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[MatchMetadata, _OtherNodeT]]": - # Without the cast, pyre doesn't know this is valid - return cast(OneOf[Union[MatchMetadata, _OtherNodeT]], OneOf(self, other)) + return OneOf(self, other) def __and__(self, other: _OtherNodeT) -> "AllOf[Union[MatchMetadata, _OtherNodeT]]": - # Without the cast, pyre doesn't know this is valid - return cast(AllOf[Union[MatchMetadata, _OtherNodeT]], AllOf(self, other)) + return AllOf(self, other) def __invert__(self) -> "MatchMetadata": # We intentionally lie here, for the same reason given in the documentation @@ -728,19 +696,16 @@ class MatchMetadataIfTrue(_BaseMetadataMatcher): """ return self._func - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. 
def __or__( self, other: _OtherNodeT ) -> "OneOf[Union[MatchMetadataIfTrue, _OtherNodeT]]": - # Without the cast, pyre doesn't know this is valid - return cast(OneOf[Union[MatchMetadataIfTrue, _OtherNodeT]], OneOf(self, other)) + return OneOf(self, other) def __and__( self, other: _OtherNodeT ) -> "AllOf[Union[MatchMetadataIfTrue, _OtherNodeT]]": - # Without the cast, pyre doesn't know this is valid - return cast(AllOf[Union[MatchMetadataIfTrue, _OtherNodeT]], AllOf(self, other)) + return AllOf(self, other) def __invert__(self) -> "MatchMetadataIfTrue": # Construct a wrapped version of MatchMetadataIfTrue for typing simplicity. @@ -817,7 +782,6 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): """ return self._matcher - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: raise Exception("AtLeastN cannot be used in a OneOf matcher") @@ -921,7 +885,6 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): """ return self._matcher - # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: raise Exception("AtMostN cannot be used in a OneOf matcher") diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index a491ffd1..9349c5b5 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -132,7 +132,6 @@ def _verify_return_annotation( # it is "None". if type_hints.get("return", type(None)) is not type(None): # noqa: E721 raise MatchDecoratorMismatch( - # pyre-fixme[16]: Anonymous callable has no attribute `__qualname__`. meth.__qualname__, f"@{decorator_name} should only decorate functions that do " + "not return.", @@ -181,7 +180,6 @@ def _verify_parameter_annotations( meth_signature = signature(meth) if len(meth_signature.parameters) != expected_param_count: raise MatchDecoratorMismatch( - # pyre-fixme[16]: Anonymous callable has no attribute `__qualname__`. meth.__qualname__, f"@{decorator_name} should decorate functions which take " + f"{expected_param_count} parameter" @@ -238,8 +236,6 @@ def _check_types( # First thing first, make sure this isn't wrapping an inner class. if not ismethod(meth): raise MatchDecoratorMismatch( - # pyre-fixme[16]: Anonymous callable has no attribute - # `__qualname__`. meth.__qualname__, "Matcher decorators should only be used on methods of " + "MatcherDecoratableTransformer or " @@ -292,7 +288,6 @@ def _assert_not_concrete( ) -> None: if func.__name__ in CONCRETE_METHODS: raise MatchDecoratorMismatch( - # pyre-ignore This anonymous method has a qualname. 
func.__qualname__, f"@{decorator_name} should not decorate functions that are concrete " + "visit or leave methods.", diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index 878ed5eb..85192559 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -79,7 +79,7 @@ } }, { - "annotation": "None", + "annotation": "typing.Type[None]", "location": { "start": { "column": 34, @@ -208,19 +208,6 @@ } } }, - { - "annotation": "BoundMethod[typing.Callable(typing.GenericMeta.__getitem__)[[Named(self, unknown), typing.Type[Variable[typing._T_co](covariant)]], typing.Type[typing.Sequence[Variable[typing._T_co](covariant)]]], typing.Type[typing.Sequence]]", - "location": { - "start": { - "column": 35, - "line": 16 - }, - "stop": { - "column": 43, - "line": 16 - } - } - }, { "annotation": "typing.Type[typing.Sequence[simple_class.Item]]", "location": { @@ -234,19 +221,6 @@ } } }, - { - "annotation": "typing.Type[simple_class.Item]", - "location": { - "start": { - "column": 44, - "line": 16 - }, - "stop": { - "column": 48, - "line": 16 - } - } - }, { "annotation": "typing.List[simple_class.Item]", "location": { @@ -469,7 +443,7 @@ } }, { - "annotation": "typing.Sequence[simple_class.Item]", + "annotation": "simple_class.Item", "location": { "start": { "column": 12, diff --git a/libcst/tool.py b/libcst/tool.py index 5aa4d12f..64144cba 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -85,6 +85,8 @@ def _node_repr_recursive( # noqa: C901 def _get_default(fld: "dataclasses.Field[object]") -> object: if fld.default_factory is not dataclasses.MISSING: + # pyre-fixme[29]: `Union[dataclasses._MISSING_TYPE, + # dataclasses._DefaultFactory[object]]` is not a function. return fld.default_factory() return fld.default @@ -783,6 +785,8 @@ def _list_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 continue # Grab the path, try to import all of the files inside of it. + # pyre-fixme[6]: For 1st argument expected `PathLike[Variable[AnyStr <: + # [str, bytes]]]` but got `Optional[str]`. 
path = os.path.dirname(os.path.abspath(imported_module.__file__)) for name, imported_module in _recursive_find(path, module): for objname in dir(imported_module): diff --git a/pyproject.toml b/pyproject.toml index ced62552..c3bb9bda 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ dev = [ "maturin>=0.8.3,<0.16", "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", - "pyre-check==0.9.10; platform_system != 'Windows'", + "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", "ufmt==2.2.0", From 5df1569a40c055c365478ab247d0841a9785124a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 2 Oct 2023 10:33:29 -0700 Subject: [PATCH 419/632] Parse multiline expressions in f-strings (#1027) --- native/libcst/src/tokenizer/core/mod.rs | 6 ++++-- native/libcst/src/tokenizer/core/string_types.rs | 2 +- native/libcst/tests/fixtures/super_strings.py | 11 ++++++++++- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 067c1cf9..477bd868 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -363,7 +363,8 @@ impl<'t> TokState<'t> { self.text_pos.next(); self.at_bol = true; if self.split_fstring - && !self.fstring_stack.iter().all(|node| node.allow_multiline()) + && self.fstring_stack.last().map(|node| node.allow_multiline()) + == Some(false) { Err(TokError::UnterminatedString) } else if self.blank_line || !self.paren_stack.is_empty() { @@ -895,7 +896,8 @@ impl<'t> TokState<'t> { is_in_format_spec: bool, is_raw_string: bool, ) -> Result, TokError<'t>> { - let allow_multiline = self.fstring_stack.iter().all(|node| node.allow_multiline()); + let allow_multiline = + self.fstring_stack.last().map(|node| node.allow_multiline()) == Some(true); let mut in_named_unicode: bool = false; let mut ok_result = Ok(None); // value to return if we reach the end and don't error out 'outer: loop { diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs index d14d13f5..b04ccb39 100644 --- a/native/libcst/src/tokenizer/core/string_types.rs +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -105,7 +105,7 @@ impl FStringNode { } pub fn allow_multiline(&self) -> bool { - self.quote_size == StringQuoteSize::Triple + self.quote_size == StringQuoteSize::Triple || self.is_in_expr() } pub fn is_in_expr(&self) -> bool { diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py index c1471a45..25eee0e0 100644 --- a/native/libcst/tests/fixtures/super_strings.py +++ b/native/libcst/tests/fixtures/super_strings.py @@ -27,6 +27,16 @@ _(f"ok { expr = !r: aosidjhoi } end") print(f"{self.ERASE_CURRENT_LINE}{self._human_seconds(elapsed_time)} {percent:.{self.pretty_precision}f}% complete, {self.estimate_completion(elapsed_time, finished, left)} estimated for {left} files to go...") +f"{"\n".join()}" + +f"___{ + x +}___" + +f"___{( + x +)}___" + f'\{{\}}' f"regexp_like(path, '.*\{file_type}$')" f"\lfoo" @@ -38,4 +48,3 @@ f'some words {a+b:.3f} more words {c+d=} final words' f"{'':*^{1:{1}}}" f"{'':*^{1:{1:{1}}}}" f"{f"{f"{f"{f"{f"{1+1}"}"}"}"}"}" - From 7ca5d7f1736c38b27dcdd41243343f2e2b5e1eb1 Mon Sep 17 00:00:00 2001 From: Sergii Dymchenko Date: Thu, 5 Oct 2023 15:46:25 -0700 Subject: [PATCH 420/632] Fix link in type_inference_provider.py (#1035) Same change as https://github.com/Instagram/LibCST/pull/913, but in the 
docstring. --- libcst/metadata/type_inference_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 6f555fdf..06ed0c19 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -39,7 +39,7 @@ class PyreData(TypedDict, total=False): class TypeInferenceProvider(BatchableMetadataProvider[str]): """ Access inferred type annotation through `Pyre Query API `_. - It requires `setup watchman `_ + It requires `setup watchman `_ and start pyre server by running ``pyre`` command. The inferred type is a string of `type annotation `_. E.g. ``typing.List[libcst._nodes.expression.Name]`` From 8d4229d959aabd888a4fa1e981bd204f14e16543 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 5 Oct 2023 19:16:12 -0700 Subject: [PATCH 421/632] bump version to 1.1.0 (#1037) --- CHANGELOG.md | 25 +++++++++++++++++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 30 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 12eed756..8e99fccf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,28 @@ +# 1.1.0 - 2023-10-05 + +## Added +* PEP 695 support + * parser: PEP 695 - Type Parameter Syntax #1004 + * Scope provider: support for type annotations #1014 +* PEP 701 support + * parser: support arbitrarily nested f-strings #1026 + * parser: Parse multiline expressions in f-strings #1027 +* parser: Support files with mixed newlines #1007 +* [libcst](https://crates.io/crates/libcst) is now published to crates.io + +## Fixed +* codemod/ApplyTypeAnnotationsVisitor: Do not annotate the same variable multiple times #956 +* parser: Don't swallow trailing whitespace #976 +* codemod/rename: Avoid duplicating import statements when the module name doesn't change #981 + +## Updated +* cli: Don't gather dirs ending .py #994 +* drop support for Python 3.7 #997 +* A few parser performance improvements: + * Switch to using thread_local regular expressions to stop mutext contention #996 + * Remove need for regex in TextPosition::matches #1002 + * Remove Regexes from whitespace parser #1008 + # 1.0.1 - 2023-06-07 ## Fixed diff --git a/native/Cargo.lock b/native/Cargo.lock index 6b03c3f7..6d360335 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -349,7 +349,7 @@ checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libcst" -version = "0.1.0" +version = "1.1.0" dependencies = [ "chic", "criterion", @@ -367,7 +367,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "0.1.0" +version = "1.1.0" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index aa89f492..e1fe3b13 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "0.1.0" +version = "1.1.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" memchr = "2.5.0" -libcst_derive = { path = "../libcst_derive", version = "0.1.0" } +libcst_derive = { path = "../libcst_derive", version = "1.1.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 2b5c61a3..eaa663da 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ 
-1,6 +1,6 @@ [package] name = "libcst_derive" -version = "0.1.0" +version = "1.1.0" edition = "2018" description = "Proc macro helpers for libcst." license-file = "LICENSE" From 19c2862ea3d709ab94184a4ca2875d337936d800 Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Thu, 5 Oct 2023 20:18:05 -0700 Subject: [PATCH 422/632] Update README.rst (#1039) update Python parseable versions range to include 3.12 --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index baba8715..5957ba3e 100644 --- a/README.rst +++ b/README.rst @@ -37,7 +37,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python .. intro-start -LibCST parses Python 3.0 -> 3.11 source code as a CST tree that keeps +LibCST parses Python 3.0 -> 3.12 source code as a CST tree that keeps all formatting details (comments, whitespaces, parentheses, etc). It's useful for building automated refactoring (codemod) applications and linters. From 83f0daed42b0393b004f7cf4b12e12d6f92f6b25 Mon Sep 17 00:00:00 2001 From: Kyle Into Date: Thu, 5 Oct 2023 20:18:39 -0700 Subject: [PATCH 423/632] fix filepathprovider generic type (#1036) * fix filepathprovider type * remove extra import --- libcst/metadata/file_path_provider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libcst/metadata/file_path_provider.py b/libcst/metadata/file_path_provider.py index 681d3793..5ed9baa6 100644 --- a/libcst/metadata/file_path_provider.py +++ b/libcst/metadata/file_path_provider.py @@ -4,13 +4,13 @@ # LICENSE file in the root directory of this source tree. from pathlib import Path -from typing import Collection, List, Mapping, Optional +from typing import List, Mapping, Optional import libcst as cst from libcst.metadata.base_provider import BatchableMetadataProvider -class FilePathProvider(BatchableMetadataProvider[Collection[Path]]): +class FilePathProvider(BatchableMetadataProvider[Path]): """ Provides the path to the current file on disk as metadata for the root :class:`~libcst.Module` node. Requires a :class:`~libcst.metadata.FullRepoManager`. From 88d0b36cdd99c14cdf70784d292cfa3b5032782e Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Thu, 5 Oct 2023 20:36:12 -0700 Subject: [PATCH 424/632] Update pyproject.toml for Python 3.12 support (#1038) * Update pyproject.toml for Python 3.12 support add 3.12 classifier and update description to correctly reflect supported Python versions * Update pyproject.toml make the stated parsable versions range consistent with the README --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c3bb9bda..ee5a2fd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools", "wheel", "setuptools-rust"] [project] name = "libcst" -description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.12 programs." 
readme = "README.rst" dynamic = ["version"] license = { file = "LICENSE" } @@ -14,6 +14,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] requires-python = ">=3.8" dependencies = [ From 693c6dc947fa59af2a8f1f8263e30a7807ee7491 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 6 Oct 2023 10:01:06 -0700 Subject: [PATCH 425/632] upgrade flake8 (#1040) --- .flake8 | 210 ++++++++++++++++++++++++++++++++----------------- pyproject.toml | 2 +- 2 files changed, 139 insertions(+), 73 deletions(-) diff --git a/.flake8 b/.flake8 index cf63afc5..eb0260b0 100644 --- a/.flake8 +++ b/.flake8 @@ -1,69 +1,126 @@ [flake8] ignore = - C407, # unnecessary list comprehension; A generator only better than a list - # comprehension if we don't always need to iterate through all items in - # the generator (based on the use case). - + # unnecessary list comprehension; A generator only better than a list + # comprehension if we don't always need to iterate through all items in + # the generator (based on the use case). + C407, # The following codes belong to pycodestyle, and overlap with black: - E101, # indentation contains mixed spaces and tabs - E111, # indentation is not a multiple of four - E112, # expected an indented block - E113, # unexpected indentation - E114, # indentation is not a multiple of four (comment) - E115, # expected an indented block (comment) - E116, # unexpected indentation (comment) - E121, # continuation line under-indented for hanging indent - E122, # continuation line missing indentation or outdented - E123, # closing bracket does not match indentation of opening bracket’s line - E124, # closing bracket does not match visual indentation - E125, # continuation line with same indent as next logical line - E126, # continuation line over-indented for hanging indent - E127, # continuation line over-indented for visual indent; is harmless - # (over-indent is visually unambiguous) and currently generates too - # many warnings for existing code. 
- E128, # continuation line under-indented for visual indent - E129, # visually indented line with same indent as next logical line - E131, # continuation line unaligned for hanging indent - E133, # closing bracket is missing indentation - E201, # whitespace after ‘(‘ - E202, # whitespace before ‘)’ - E203, # whitespace before ‘:’; this warning is invalid for slices - E211, # whitespace before ‘(‘ - E221, # multiple spaces before operator - E222, # multiple spaces after operator - E223, # tab before operator - E224, # tab after operator - E225, # missing whitespace around operator - E226, # missing whitespace around arithmetic operator - E227, # missing whitespace around bitwise or shift operator - E228, # missing whitespace around modulo operator - E231, # missing whitespace after ‘,’, ‘;’, or ‘:’ - E241, # multiple spaces after ‘,’ - E242, # tab after ‘,’ - E251, # unexpected spaces around keyword / parameter equals - E261, # at least two spaces before inline comment - E262, # inline comment should start with ‘# ‘ - E265, # block comment should start with ‘# ‘ - E266, # too many leading ‘#’ for block comment - E271, # multiple spaces after keyword - E272, # multiple spaces before keyword - E273, # tab after keyword - E274, # tab before keyword - E275, # missing whitespace after keyword - E301, # expected 1 blank line, found 0 - E302, # expected 2 blank lines, found 0 - E303, # too many blank lines (3) - E304, # blank lines found after function decorator - E305, # expected 2 blank lines after end of function or class - E306, # expected 1 blank line before a nested definition - E401, # multiple imports on one line - E501, # line too long (> 79 characters) - E502, # the backslash is redundant between brackets - E701, # multiple statements on one line (colon) - E702, # multiple statements on one line (semicolon) - E703, # statement ends with a semicolon - E704, # multiple statements on one line (def) + # indentation contains mixed spaces and tabs + E101, + # indentation is not a multiple of four + E111, + # expected an indented block + E112, + # unexpected indentation + E113, + # indentation is not a multiple of four (comment) + E114, + # expected an indented block (comment) + E115, + # unexpected indentation (comment) + E116, + # continuation line under-indented for hanging indent + E121, + # continuation line missing indentation or outdented + E122, + # closing bracket does not match indentation of opening bracket’s line + E123, + # closing bracket does not match visual indentation + E124, + # continuation line with same indent as next logical line + E125, + # continuation line over-indented for hanging indent + E126, + # continuation line over-indented for visual indent; is harmless + # (over-indent is visually unambiguous) and currently generates too + # many warnings for existing code. 
+ E127, + + # continuation line under-indented for visual indent + E128, + # visually indented line with same indent as next logical line + E129, + # continuation line unaligned for hanging indent + E131, + # closing bracket is missing indentation + E133, + # whitespace after ‘(‘ + E201, + # whitespace before ‘)’ + E202, + # whitespace before ‘:’; this warning is invalid for slices + E203, + # whitespace before ‘(‘ + E211, + # multiple spaces before operator + E221, + # multiple spaces after operator + E222, + # tab before operator + E223, + # tab after operator + E224, + # missing whitespace around operator + E225, + # missing whitespace around arithmetic operator + E226, + # missing whitespace around bitwise or shift operator + E227, + # missing whitespace around modulo operator + E228, + # missing whitespace after ‘,’, ‘;’, or ‘:’ + E231, + # multiple spaces after ‘,’ + E241, + # tab after ‘,’ + E242, + # unexpected spaces around keyword / parameter equals + E251, + # at least two spaces before inline comment + E261, + # inline comment should start with ‘# ‘ + E262, + # block comment should start with ‘# ‘ + E265, + # too many leading ‘#’ for block comment + E266, + # multiple spaces after keyword + E271, + # multiple spaces before keyword + E272, + # tab after keyword + E273, + # tab before keyword + E274, + # missing whitespace after keyword + E275, + # expected 1 blank line, found 0 + E301, + # expected 2 blank lines, found 0 + E302, + # too many blank lines (3) + E303, + # blank lines found after function decorator + E304, + # expected 2 blank lines after end of function or class + E305, + # expected 1 blank line before a nested definition + E306, + # multiple imports on one line + E401, + # line too long (> 79 characters) + E501, + # the backslash is redundant between brackets + E502, + # multiple statements on one line (colon) + E701, + # multiple statements on one line (semicolon) + E702, + # statement ends with a semicolon + E703, + # multiple statements on one line (def) + E704, # These are pycodestyle lints that black doesn't catch: # E711, # comparison to None should be ‘if cond is None:’ # E712, # comparison to True should be ‘if cond is True:’ or ‘if cond:’ @@ -78,16 +135,25 @@ ignore = # I think these are internal to pycodestyle? 
# E901, # SyntaxError or IndentationError # E902, # IOError - F811, # isn't aware of type-only imports, results in false-positives - W191, # indentation contains tabs - W291, # trailing whitespace - W292, # no newline at end of file - W293, # blank line contains whitespace - W391, # blank line at end of file - W503, # line break before binary operator; binary operator in a new line is - # the standard - W504, # line break after binary operator - W505, # not part of PEP8; doc line too long (> 79 characters) + # isn't aware of type-only imports, results in false-positives + F811, + # indentation contains tabs + W191, + # trailing whitespace + W291, + # no newline at end of file + W292, + # blank line contains whitespace + W293, + # blank line at end of file + W391, + # line break before binary operator; binary operator in a new line is + # the standard + W503, + # line break after binary operator + W504, + # not part of PEP8; doc line too long (> 79 characters) + W505, # These are pycodestyle lints that black doesn't catch: # W601, # .has_key() is deprecated, use ‘in’ # W602, # deprecated form of raising exception diff --git a/pyproject.toml b/pyproject.toml index ee5a2fd6..dae30553 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dev = [ "coverage>=4.5.4", "build>=0.10.0", "fixit==2.0.0.post1", - "flake8>=3.7.8,<5", + "flake8==6.1.0", "Sphinx>=5.1.1", "hypothesis>=4.36.0", "hypothesmith>=0.0.4", From 9dd3ea7ec7606bc8a985d0398d49aaa90d68bc5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 11:14:31 +0100 Subject: [PATCH 426/632] Bump pypa/cibuildwheel from 2.16.1 to 2.16.2 (#1041) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.1 to 2.16.2. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.1...v2.16.2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 431074ab..c9b05f9f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -58,7 +58,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.16.1 + uses: pypa/cibuildwheel@v2.16.2 - uses: actions/upload-artifact@v3 with: path: wheelhouse/*.whl From f8a9b80d9e05f0e14f3a68b0a6a5767a95184fa4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 15:07:17 -0700 Subject: [PATCH 427/632] Bump rustix from 0.38.9 to 0.38.19 in /native (#1043) Bumps [rustix](https://github.com/bytecodealliance/rustix) from 0.38.9 to 0.38.19. - [Release notes](https://github.com/bytecodealliance/rustix/releases) - [Commits](https://github.com/bytecodealliance/rustix/compare/v0.38.9...v0.38.19) --- updated-dependencies: - dependency-name: rustix dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 6d360335..35996e8d 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -343,9 +343,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.147" +version = "0.2.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" @@ -376,9 +376,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.5" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" +checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" [[package]] name = "lock_api" @@ -671,9 +671,9 @@ checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "rustix" -version = "0.38.9" +version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bfe0f2582b4931a45d1fa608f8a8722e8b3c7ac54dd6d5f3b3212791fedef49" +checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ "bitflags 2.4.0", "errno", From 52bbff6dfc2dd7b3086710bc64896ccfaaed1a3d Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Tue, 12 Dec 2023 14:40:40 -0800 Subject: [PATCH 428/632] Set repository metadata entry for Rust crates (#1063) --- native/libcst/Cargo.toml | 2 +- native/libcst_derive/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index e1fe3b13..e96d4bc4 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -11,7 +11,7 @@ edition = "2018" rust-version = "1.70" description = "A Python parser and Concrete Syntax Tree library." license-file = "LICENSE" -homepage = "https://github.com/Instagram/LibCST" +repository = "https://github.com/Instagram/LibCST" documentation = "https://libcst.rtfd.org" keywords = ["python", "cst", "ast"] categories = ["parser-implementations"] diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index eaa663da..2675ab18 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -4,7 +4,7 @@ version = "1.1.0" edition = "2018" description = "Proc macro helpers for libcst." 
license-file = "LICENSE" -homepage = "https://github.com/Instagram/LibCST" +repository = "https://github.com/Instagram/LibCST" documentation = "https://libcst.rtfd.org" keywords = ["macros", "python"] From d97fb9be809676a3b6a3d3b6f9e35e954353bfab Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Dec 2023 07:53:54 -0800 Subject: [PATCH 429/632] Update syn to v2 (#1064) --- native/Cargo.lock | 37 +++++++++++++++-------- native/libcst_derive/Cargo.toml | 2 +- native/libcst_derive/src/cstnode.rs | 46 ++++++++++++----------------- native/libcst_derive/src/into_py.rs | 2 +- 4 files changed, 45 insertions(+), 42 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 35996e8d..2d3899cd 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -370,7 +370,7 @@ name = "libcst_derive" version = "1.1.0" dependencies = [ "quote", - "syn", + "syn 2.0.41", "trybuild", ] @@ -533,9 +533,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.46" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" dependencies = [ "unicode-ident", ] @@ -586,7 +586,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -597,14 +597,14 @@ checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] name = "quote" -version = "1.0.21" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] @@ -720,7 +720,7 @@ checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -742,9 +742,20 @@ checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" [[package]] name = "syn" -version = "1.0.103" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" dependencies = [ "proc-macro2", "quote", @@ -783,7 +794,7 @@ checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -864,7 +875,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn", + "syn 1.0.109", "wasm-bindgen-shared", ] @@ -886,7 +897,7 @@ checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", "wasm-bindgen-backend", "wasm-bindgen-shared", ] diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 2675ab18..5d7064e9 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -12,7 +12,7 @@ keywords = ["macros", "python"] proc-macro = true [dependencies] -syn = "1.0" +syn = 
"2.0" quote = "1.0" [dev-dependencies] diff --git a/native/libcst_derive/src/cstnode.rs b/native/libcst_derive/src/cstnode.rs index 480f954d..cdb6947e 100644 --- a/native/libcst_derive/src/cstnode.rs +++ b/native/libcst_derive/src/cstnode.rs @@ -13,8 +13,8 @@ use syn::{ spanned::Spanned, token::Comma, AngleBracketedGenericArguments, Attribute, Data, DataEnum, DataStruct, DeriveInput, Field, - Fields, FieldsNamed, FieldsUnnamed, GenericArgument, Generics, Ident, Meta, MetaList, - NestedMeta, Path, PathArguments, PathSegment, Token, Type, TypePath, Visibility, + Fields, FieldsNamed, FieldsUnnamed, GenericArgument, Generics, Ident, Meta, Path, + PathArguments, PathSegment, Token, Type, TypePath, Visibility, }; pub(crate) struct CSTNodeParams { @@ -68,7 +68,7 @@ impl Parse for SupportedTrait { impl Parse for CSTNodeParams { fn parse(input: ParseStream) -> syn::Result { Ok(Self { - traits: input.parse_terminated(SupportedTrait::parse)?, + traits: input.parse_terminated(SupportedTrait::parse, Token![,])?, }) } } @@ -239,12 +239,8 @@ fn impl_unnamed_fields(mut deflated_fields: FieldsUnnamed) -> FieldsUnnamed { // Make sure all Deflated* types have 'r 'a lifetime params if !added_lifetime { - deflated_fields.unnamed.push(Field { - vis: Visibility::Inherited, - ty: parse_quote!(std::marker::PhantomData<&'r &'a ()>), - attrs: Default::default(), - colon_token: Default::default(), - ident: Default::default(), + deflated_fields.unnamed.push(parse_quote! { + std::marker::PhantomData<&'r &'a ()> }); } deflated_fields @@ -284,12 +280,8 @@ fn impl_named_fields(mut fields: FieldsNamed) -> (Fields, Fields) { // Make sure all Deflated* types have 'r 'a lifetime params if !added_lifetime { - deflated_fields.named.push(Field { - attrs: Default::default(), - vis: Visibility::Inherited, - ident: Some(parse_quote!(_phantom)), - colon_token: Default::default(), - ty: parse_quote!(std::marker::PhantomData<&'r &'a ()>), + deflated_fields.named.push(parse_quote! { + _phantom: std::marker::PhantomData<&'r &'a ()> }); } @@ -411,19 +403,19 @@ fn rightmost_path_segment_mut(ty: &mut Type) -> Option<&mut PathSegment> { } fn is_not_intopy_attr(attr: &Attribute) -> bool { - let path = &attr.path; - // support #[cfg_attr(feature="py", skip_py)] + let path = attr.path(); + // support #[cfg_attr(feature = "py", skip_py)] if path.is_ident("cfg_attr") { - match attr.parse_meta() { - Ok(Meta::List(MetaList { nested, .. 
})) => { - for meta in nested { - if let NestedMeta::Meta(Meta::Path(path)) = meta { - return !is_intopy_attr_path(&path); - } - } - } - _ => return false, - } + return match attr.parse_args_with(|input: ParseStream| { + let _: Meta = input.parse()?; + let _: Token![,] = input.parse()?; + let nested_path: Path = input.parse()?; + let _: Option = input.parse()?; + Ok(nested_path) + }) { + Ok(nested_path) => !is_intopy_attr_path(&nested_path), + Err(_) => false, + }; } !is_intopy_attr_path(path) } diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index e1d85132..3894212f 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -176,5 +176,5 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt } fn has_attr(attrs: &[Attribute], name: &'static str) -> bool { - attrs.iter().any(|attr| attr.path.is_ident(name)) + attrs.iter().any(|attr| attr.path().is_ident(name)) } From ce5903f4cb5e84c963741d325c7c4e78d30e3191 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 20 Dec 2023 16:58:39 +0000 Subject: [PATCH 430/632] ci: update rust toolchain GHA (#1072) `actions-rs` is unmaintained --- .github/workflows/ci.yml | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f81b3bf4..b78dfd0f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,9 +27,7 @@ jobs: - name: Install hatch run: | pip install -U hatch - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable + - uses: dtolnay/rust-toolchain@stable - name: Rust Cache uses: Swatinem/rust-cache@v2.7.0 with: @@ -138,9 +136,8 @@ jobs: os: [ubuntu-latest, macos-latest, windows-latest] steps: - uses: actions/checkout@v4 - - uses: actions-rs/toolchain@v1 + - uses: dtolnay/rust-toolchain@stable with: - toolchain: stable components: rustfmt, clippy - name: Rust Cache uses: Swatinem/rust-cache@v2.7.0 @@ -179,11 +176,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions-rs/toolchain@v1 + - uses: dtolnay/rust-toolchain@stable with: - profile: minimal - toolchain: stable - override: true + components: rustfmt - run: rustup component add rustfmt - uses: actions-rs/cargo@v1 with: From 43a27b1222993f29086748ef6a08a57801c536aa Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 20 Dec 2023 22:23:21 +0000 Subject: [PATCH 431/632] cI: remove rust cache (#1074) --- .github/workflows/build.yml | 5 ----- .github/workflows/ci.yml | 12 ++---------- 2 files changed, 2 insertions(+), 15 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c9b05f9f..a089e557 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -48,11 +48,6 @@ jobs: cache: pip cache-dependency-path: "pyproject.toml" python-version: "3.10" - - name: Rust Cache - if: ${{ !contains(matrix.os, 'self-hosted') }} - uses: Swatinem/rust-cache@v2.7.0 - with: - working-directory: native - name: Disable scmtools local scheme if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b78dfd0f..70441807 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,12 +27,9 @@ jobs: - name: Install hatch run: | pip install -U hatch - - uses: dtolnay/rust-toolchain@stable - - name: Rust Cache - uses: Swatinem/rust-cache@v2.7.0 + - uses: actions-rs/toolchain@v1 with: - shared-key: "shared" - 
workspaces: native + toolchain: stable - name: Build LibCST run: hatch env create - name: Tests @@ -139,11 +136,6 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - - name: Rust Cache - uses: Swatinem/rust-cache@v2.7.0 - with: - shared-key: "shared" - workspaces: native - uses: actions/setup-python@v4 with: cache: pip From 5a8650b92e52486aaa30948e32bec42cf63f9335 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:01:27 -0800 Subject: [PATCH 432/632] Bump black from 23.9.1 to 23.12.1 (#1077) Bumps [black](https://github.com/psf/black) from 23.9.1 to 23.12.1. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.9.1...23.12.1) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index dae30553..265b24c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ dependencies = [ [project.optional-dependencies] dev = [ - "black==23.9.1", + "black==23.12.1", "coverage>=4.5.4", "build>=0.10.0", "fixit==2.0.0.post1", From 976b84c618f86a3b9dd96ecd454910948605699c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:02:15 -0800 Subject: [PATCH 433/632] Bump actions/download-artifact from 3 to 4 (#1066) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/pypi_upload.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 7593f3de..433460ea 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -22,7 +22,7 @@ jobs: fetch-depth: 0 - name: Download binary wheels id: download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: wheels path: wheelhouse From dc329f29ac859c425f58524448d4c4dccf07c4e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:05:13 -0800 Subject: [PATCH 434/632] Bump actions/upload-artifact from 3 to 4 (#1065) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 3 to 4. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a089e557..93be8d12 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -54,7 +54,7 @@ jobs: echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels uses: pypa/cibuildwheel@v2.16.2 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl name: wheels diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70441807..c4ac8c08 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,7 +96,7 @@ jobs: fail_ci_if_error: true verbose: true - name: Archive Coverage - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: coverage path: coverage.xml @@ -118,7 +118,7 @@ jobs: - uses: ts-graphviz/setup-graphviz@v1 - run: hatch run docs - name: Archive Docs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: sphinx-docs path: docs/build From 4b31d3db4965ed6e5ab1ac2883c2f15dd928d756 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:10:35 -0800 Subject: [PATCH 435/632] Bump actions/setup-python from 4 to 5 (#1060) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/pypi_upload.yml | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 93be8d12..5ab9bc4f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 if: ${{ !contains(matrix.os, 'self-hosted') }} with: cache: pip diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c4ac8c08..55b57eb4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,7 +19,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" @@ -46,7 +46,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" @@ -63,7 +63,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" @@ -79,7 +79,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" @@ -108,7 +108,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" @@ -136,7 +136,7 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 433460ea..5fb2fafa 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -26,7 +26,7 @@ jobs: with: name: wheels path: wheelhouse - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" From 30df6fcdabce96d2702b149241c969fa123929b7 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 3 Jan 2024 20:06:37 +0000 Subject: [PATCH 436/632] remove 3.8 support (#1073) This PR also starts using 3.12 properly in CI --- .github/workflows/ci.yml | 2 +- README.rst | 2 +- pyproject.toml | 3 +-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 55b57eb4..cdbd863e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12-dev"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 with: diff --git a/README.rst b/README.rst index 5957ba3e..641b2112 100644 --- a/README.rst +++ b/README.rst @@ -125,7 +125,7 @@ For a more detailed usage example, `see our documentation Installation ------------ -LibCST requires Python 3.8+ and can be easily installed using most common Python +LibCST requires Python 3.9+ and can be easily installed using most common Python 
packaging tools. We recommend installing the latest stable release from `PyPI `_ with pip: diff --git a/pyproject.toml b/pyproject.toml index 265b24c1..e2e9ea4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,13 +10,12 @@ license = { file = "LICENSE" } classifiers = [ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ "typing_extensions>=3.7.4.2", "typing_inspect>=0.4.0", From dbbfe1e0b8478548686fca5bc73be7ec0e0c9148 Mon Sep 17 00:00:00 2001 From: anonymousdouble <112695649+anonymousdouble@users.noreply.github.com> Date: Thu, 4 Jan 2024 08:06:33 +1100 Subject: [PATCH 437/632] Update test_fix_pyre_directives.py (#1082) * Update test_fix_pyre_directives.py refactor with fstring to format string to make code more Pythonic. * Update test_fix_pyre_directives.py refactor with fstring to format string to make code more Pythonic. * Update test_fix_pyre_directives.py refactor with fstring to format string to make code more Pythonic. * Update test_fix_pyre_directives.py refactor with fstring to format string to make code more Pythonic. * Update test_fix_pyre_directives.py refactor with chain constant value assignment to make code more Pythonic * Update test_fix_pyre_directives.py refactor with chain constant value assignment to make code more Pythonic --- .../tests/test_fix_pyre_directives.py | 38 +++++-------------- 1 file changed, 9 insertions(+), 29 deletions(-) diff --git a/libcst/codemod/commands/tests/test_fix_pyre_directives.py b/libcst/codemod/commands/tests/test_fix_pyre_directives.py index 2d07af12..4707073a 100644 --- a/libcst/codemod/commands/tests/test_fix_pyre_directives.py +++ b/libcst/codemod/commands/tests/test_fix_pyre_directives.py @@ -14,14 +14,9 @@ class TestFixPyreDirectivesCommand(CodemodTest): """ Tests that a pyre-strict inside the module header doesn't get touched. """ - before = """ - # pyre-strict - from typing import List - - def baz() -> List[Foo]: - pass - """ - after = """ + after = ( + before + ) = """ # pyre-strict from typing import List @@ -34,16 +29,9 @@ class TestFixPyreDirectivesCommand(CodemodTest): """ Tests that a pyre-strict inside the module header doesn't get touched. """ - before = """ - # This is some header comment. - # - # pyre-strict - from typing import List - - def baz() -> List[Foo]: - pass - """ - after = """ + after = ( + before + ) = """ # This is some header comment. # # pyre-strict @@ -58,17 +46,9 @@ class TestFixPyreDirectivesCommand(CodemodTest): """ Tests that a pyre-strict inside the module header doesn't get touched. """ - before = """ - # pyre-strict - # - # This is some header comment. - - from typing import List - - def baz() -> List[Foo]: - pass - """ - after = """ + after = ( + before + ) = """ # pyre-strict # # This is some header comment. 
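The hunks above collapse the previously duplicated `before`/`after` strings into a single chained assignment, so the expected output of the codemod test is, by construction, the same object as its input. A minimal sketch of that pattern — the string literal here is illustrative, not the exact fixture from the test suite:

```python
# Chained assignment binds one object to several names in a single statement,
# so "after" and "before" refer to the very same string here.
after = (
    before
) = "# pyre-strict\nfrom typing import List\n"

assert after is before  # one object, two names
assert after == before  # and therefore trivially equal
```

Because both names point at the same literal, the "input" and "expected output" of the no-op test can never drift apart, which is the intent of the refactor.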
From 1757e0f5b41a638af574823315c499ac13e72b09 Mon Sep 17 00:00:00 2001 From: Alvaro Leiva geisse Date: Wed, 3 Jan 2024 14:16:52 -0800 Subject: [PATCH 438/632] installing rustc/cargo for mybinder demo (#1083) when we switched to the rust compiler by default, mybinder stopped working, as reported in https://github.com/Instagram/LibCST/issues/1054. This is because the binder docker image does not have a rust compiler or tools; this change installs them by using the apt.txt file. Co-authored-by: Alvaro Leiva Geisse --- .gitignore | 1 + apt.txt | 2 ++ 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 apt.txt diff --git a/.gitignore b/.gitignore index 57628584..004ebb4c 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ libcst/_version.py target/ venv/ .venv/ +.idea/ diff --git a/apt.txt b/apt.txt new file mode 100644 index 00000000..a3e85e90 --- /dev/null +++ b/apt.txt @@ -0,0 +1,2 @@ +rustc +cargo \ No newline at end of file From ce5903f4cb5e84c963741d325c7c4e78d30e3191 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 4 Jan 2024 12:42:15 +0000 Subject: [PATCH 439/632] ci: use separate artifact names (#1085) --- .github/workflows/build.yml | 2 +- .github/workflows/pypi_upload.yml | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5ab9bc4f..6eb80744 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -57,4 +57,4 @@ jobs: - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl - name: wheels + name: wheels-${{matrix.os}}-${{matrix.vers}} diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 7593f3de..433460ea 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -24,8 +24,9 @@ jobs: id: download uses: actions/download-artifact@v4 with: - name: wheels + pattern: wheels-* path: wheelhouse + merge-multiple: true - uses: actions/setup-python@v5 with: cache: pip From c011a48a249ac3efc19dc994a401054d2f13d0be Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Thu, 4 Jan 2024 11:26:27 -0800 Subject: [PATCH 440/632] Allow `Element::codegen` to be used by external users (#1071) The `Codegen` trait is `pub`, but users wanting to explicitly perform codegen for `Element` had to copy-paste this part of the code. --- native/libcst/src/nodes/expression.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 39ca64b1..971ef8bc 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -937,7 +937,7 @@ pub enum Element<'a> { } impl<'a> Element<'a> { - fn codegen( + pub fn codegen( &self, state: &mut CodegenState<'a>, default_comma: bool, From c6fa092565e7f995286a5ca9bbda80c826206de7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 09:39:05 +0000 Subject: [PATCH 441/632] Update maturin requirement from <0.16,>=0.8.3 to >=0.8.3,<1.5 (#1059) Updates the requirements on [maturin](https://github.com/pyo3/maturin) to permit the latest version. - [Release notes](https://github.com/pyo3/maturin/releases) - [Changelog](https://github.com/PyO3/maturin/blob/main/Changelog.md) - [Commits](https://github.com/pyo3/maturin/compare/v0.8.3...v1.4.0) --- updated-dependencies: - dependency-name: maturin dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e2e9ea4c..fc37c085 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ dev = [ "hypothesis>=4.36.0", "hypothesmith>=0.0.4", "jupyter>=1.0.0", - "maturin>=0.8.3,<0.16", + "maturin>=0.8.3,<1.5", "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", "pyre-check==0.9.18; platform_system != 'Windows'", From 266f531de1ed82393ef87ea959bee8a76a0e5972 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 14:05:38 +0000 Subject: [PATCH 442/632] Bump fixit from 2.0.0.post1 to 2.1.0 (#1087) Bumps [fixit](https://github.com/Instagram/Fixit) from 2.0.0.post1 to 2.1.0. - [Changelog](https://github.com/Instagram/Fixit/blob/main/CHANGELOG.md) - [Commits](https://github.com/Instagram/Fixit/compare/v2.0.0.post1...v2.1.0) --- updated-dependencies: - dependency-name: fixit dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fc37c085..ebb5381b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dev = [ "black==23.12.1", "coverage>=4.5.4", "build>=0.10.0", - "fixit==2.0.0.post1", + "fixit==2.1.0", "flake8==6.1.0", "Sphinx>=5.1.1", "hypothesis>=4.36.0", From dfcba1ff0382fe40ef50b07b21adbcf58964aeb1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 14:06:02 +0000 Subject: [PATCH 443/632] Bump ufmt from 2.2.0 to 2.3.0 (#1047) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.2.0 to 2.3.0. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.2.0...v2.3.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ebb5381b..2f8a6db9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ dev = [ "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.2.0", + "ufmt==2.3.0", "usort==1.0.7", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From c5ef75d0c3745eed6f115fcc6ec2cf13f9df2e5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 09:01:32 -0800 Subject: [PATCH 444/632] Bump jinja2 from 3.1.2 to 3.1.3 (#1090) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.2 to 3.1.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.2...3.1.3) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2f8a6db9..076a8044 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ dev = [ "usort==1.0.7", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", - "jinja2==3.1.2", + "jinja2==3.1.3", ] [project.urls] From f5fe4eb25acf029cc8b7d529d6900901d63fdbcf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 09:02:10 -0800 Subject: [PATCH 445/632] Bump flake8 from 6.1.0 to 7.0.0 (#1088) Bumps [flake8](https://github.com/pycqa/flake8) from 6.1.0 to 7.0.0. - [Commits](https://github.com/pycqa/flake8/compare/6.1.0...7.0.0) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 076a8044..91807ea6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dev = [ "coverage>=4.5.4", "build>=0.10.0", "fixit==2.1.0", - "flake8==6.1.0", + "flake8==7.0.0", "Sphinx>=5.1.1", "hypothesis>=4.36.0", "hypothesmith>=0.0.4", From fad448eb81b5c0abe8b0454c1bc715e80e5fb3bd Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Tue, 16 Jan 2024 13:15:47 -0800 Subject: [PATCH 446/632] Upgrade rust to version 1.70 in readthedocs config (#1091) Readthedocs builds are currently failing because the libcst wheel fails to build, hitting an error when trying to get rust dependencies: ``` running build_rust Updating crates.io index error: failed to select a version for the requirement `regex = "=1.9.3"` candidate versions found which didn't match: 1.8.4, 1.8.3, 1.8.2, ... location searched: crates.io index required by package `libcst v1.1.0 (/home/docs/checkouts/readthedocs.org/user_builds/libcst/checkouts/latest/native/libcst)` error: `cargo metadata --manifest-path native/libcst/Cargo.toml --format-version 1` failed with code 101 ``` Assuming this is related to current configuration requesting rust v1.55, rather than 1.70 that is currently offered. --- .readthedocs.yml | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 008477b4..bb6eb608 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -9,7 +9,7 @@ build: os: ubuntu-20.04 tools: python: "3" - rust: "1.55" + rust: "1.70" apt_packages: - graphviz diff --git a/pyproject.toml b/pyproject.toml index 91807ea6..b49c09a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools", "wheel", "setuptools-rust"] +requires = ["setuptools", "setuptools-scm", "setuptools-rust", "wheel"] [project] name = "libcst" From a2a60c147cd4322d05771613e852e59a949be44e Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Thu, 18 Jan 2024 11:09:52 -0800 Subject: [PATCH 447/632] Make readme example use python syntax highlighting (#1092) --- README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 641b2112..3b3d39ed 100644 --- a/README.rst +++ b/README.rst @@ -62,7 +62,9 @@ Example expression:: 1 + 2 -CST representation:: +CST representation: + +.. 
code-block:: python BinaryOperation( left=Integer( From c854c986b611042b4bad842a6db6096c2f554400 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 2 Feb 2024 20:49:25 +0000 Subject: [PATCH 448/632] Fix parsing list matchers without explicit brackets (#1097) ``` match a: case 1, 2: pass ``` This is parsed correctly by the grammar, but the default values of `MatchList.lbracket` and `MatchList.rbracket` are inconsistent between Python and Rust, causing the above snippet to round-trip (from Python) to: ``` match a: case [1, 2]: pass ``` Fixes #1096. --- libcst/_nodes/statement.py | 4 ++-- native/libcst/tests/fixtures/malicious_match.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index a9502da8..bf9e8ecc 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -3071,10 +3071,10 @@ class MatchList(MatchSequence): patterns: Sequence[Union[MatchSequenceElement, MatchStar]] #: An optional left bracket. If missing, this is an open sequence pattern. - lbracket: Optional[LeftSquareBracket] = LeftSquareBracket.field() + lbracket: Optional[LeftSquareBracket] = None #: An optional left bracket. If missing, this is an open sequence pattern. - rbracket: Optional[RightSquareBracket] = RightSquareBracket.field() + rbracket: Optional[RightSquareBracket] = None #: Parenthesis at the beginning of the node lpar: Sequence[LeftParen] = () diff --git a/native/libcst/tests/fixtures/malicious_match.py b/native/libcst/tests/fixtures/malicious_match.py index cabf4689..8c46571f 100644 --- a/native/libcst/tests/fixtures/malicious_match.py +++ b/native/libcst/tests/fixtures/malicious_match.py @@ -36,4 +36,5 @@ match x: case Foo | Bar | ( Baz): pass case x,y , * more :pass case y.z: pass + case 1, 2: pass From 55f3e34dfcfff41691789609118360ecb6968f5d Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 2 Feb 2024 20:50:07 +0000 Subject: [PATCH 449/632] Add roundtrip tests from Python (#1098) Our current roundtrip tests only exercise the Rust codepaths. This PR runs the same roundtrip scenarios but from Python. --- libcst/tests/test_roundtrip.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 libcst/tests/test_roundtrip.py diff --git a/libcst/tests/test_roundtrip.py b/libcst/tests/test_roundtrip.py new file mode 100644 index 00000000..010d840d --- /dev/null +++ b/libcst/tests/test_roundtrip.py @@ -0,0 +1,26 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from pathlib import Path +from unittest import TestCase + +from libcst import parse_module +from libcst._parser.entrypoints import is_native + +fixtures: Path = Path(__file__).parent.parent.parent / "native/libcst/tests/fixtures" + + +class RoundTripTests(TestCase): + def test_clean_roundtrip(self) -> None: + if not is_native(): + self.skipTest("pure python parser doesn't work with this") + self.assertTrue(fixtures.exists(), f"{fixtures} should exist") + files = list(fixtures.iterdir()) + self.assertGreater(len(files), 0) + for file in files: + with self.subTest(file=str(file)): + src = file.read_text(encoding="utf-8") + mod = parse_module(src) + self.assertEqual(mod.code, src) From 724026aa6543b54bc4584c1d9493ebdfded52c40 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 2 Feb 2024 20:58:56 +0000 Subject: [PATCH 450/632] Remove reference to distutil (#1099) Distutil has been removed in Python 3.12. Tested by: ``` py -m libcst.tool codemod noop.NOOPCommand .\libcst\tool.py Calculating full-repo metadata... Executing codemod... Finished codemodding 1 files! - Transformed 1 files successfully. - Skipped 0 files. - Failed to codemod 0 files. - 0 warnings were generated. ``` --- libcst/tool.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/libcst/tool.py b/libcst/tool.py index 64144cba..5469ef27 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -10,11 +10,11 @@ import argparse import dataclasses -import distutils.spawn import importlib import inspect import os import os.path +import shutil import sys import textwrap from abc import ABC, abstractmethod @@ -375,10 +375,7 @@ def _find_and_load_config(proc_name: str) -> Dict[str, Any]: # Make sure that the formatter is findable. if config["formatter"]: - exe = ( - distutils.spawn.find_executable(config["formatter"][0]) - or config["formatter"][0] - ) + exe = shutil.which(config["formatter"][0]) or config["formatter"][0] config["formatter"] = [os.path.abspath(exe), *config["formatter"][1:]] return config From 68f98c676c125efda907368efddd3a8752cf0fed Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 3 Feb 2024 09:40:04 +0000 Subject: [PATCH 451/632] ci: various tweaks (#1100) --- .github/workflows/ci.yml | 4 +--- pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cdbd863e..aa110a8f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: with: toolchain: stable - name: Build LibCST - run: hatch env create + run: hatch -vv env create - name: Tests run: hatch run test - name: Pure Parser Tests @@ -138,8 +138,6 @@ jobs: components: rustfmt, clippy - uses: actions/setup-python@v5 with: - cache: pip - cache-dependency-path: "pyproject.toml" python-version: "3.10" - name: test uses: actions-rs/cargo@v1 diff --git a/pyproject.toml b/pyproject.toml index b49c09a8..c7ffee7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ Github = "https://github.com/Instagram/LibCST" Changelog = "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md" [tool.black] -target-version = ["py38"] +target-version = ["py39"] extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. 
See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format [tool.hatch.envs.default] @@ -68,7 +68,7 @@ lint = [ "python -m slotscheck libcst", "python scripts/check_copyright.py", ] -test = "python -m libcst.tests" +test = ["python --version", "python -m libcst.tests"] typecheck = ["pyre --version", "pyre check"] [tool.slotscheck] From e5cc07c3428ea87af0a89bf50d070f4a8d19e998 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 3 Feb 2024 09:52:16 +0000 Subject: [PATCH 452/632] ci: upgrade to cibuildwheel 2.16.5 (#1101) This fixes wheel build failures on Windows. --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6eb80744..38e95c36 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -53,7 +53,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.16.2 + uses: pypa/cibuildwheel@v2.16.5 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From a4fb999774bd41ef30220e3d67e3f9888257509b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 3 Feb 2024 11:39:41 +0000 Subject: [PATCH 453/632] ci: build linux-arm64 wheels using emulation (#1102) Stop using self-hosted runner --- .github/workflows/build.yml | 35 +++++++++++-------------------- .github/workflows/pypi_upload.yml | 6 +++--- 2 files changed, 15 insertions(+), 26 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 38e95c36..14ed9d67 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,29 +4,13 @@ on: jobs: # Build python wheels - build_matrix: - name: Prepare job matrix for build job - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - steps: - - uses: actions/checkout@v4 - - id: set-matrix - # match github.ref to the on_ref_regex field in the json - # to skip running linux/aarch64 builds on PRs - run: | - matrix=$(jq --arg ref "${{ github.ref }}" \ - 'map(select(.on_ref_regex as $pat | $pat == null or ($ref | test($pat))) | del(.on_ref_regex))' \ - .github/build-matrix.json) - echo matrix={\"include\":$(echo $matrix)}\" >> $GITHUB_OUTPUT - build: - name: Build wheels on ${{ join(matrix.os, '/') }}/${{ matrix.vers }} - needs: build_matrix + name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: fail-fast: false - matrix: ${{fromJson(needs.build_matrix.outputs.matrix)}} + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] env: SCCACHE_VERSION: 0.2.13 CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" @@ -36,25 +20,30 @@ jobs: CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" - CIBW_ARCHS: ${{ matrix.vers }} + CIBW_ARCHS_LINUX: auto aarch64 + CIBW_ARCHS_MACOS: x86_64 arm64 CIBW_BUILD_VERBOSITY: 1 steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-python@v5 - if: ${{ !contains(matrix.os, 'self-hosted') }} with: cache: pip cache-dependency-path: "pyproject.toml" - python-version: "3.10" + python-version: "3.12" - name: Disable scmtools local scheme if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- echo 
LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v3 + with: + platforms: all - name: Build wheels uses: pypa/cibuildwheel@v2.16.5 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl - name: wheels-${{matrix.os}}-${{matrix.vers}} + name: wheels-${{matrix.os}} diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index c4ed262e..6526a002 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -48,12 +48,12 @@ jobs: with: user: __token__ password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository_url: https://test.pypi.org/legacy/ - packages_dir: ${{ steps.download.outputs.download-path }} + repository-url: https://test.pypi.org/legacy/ + packages-dir: ${{ steps.download.outputs.download-path }} - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} - packages_dir: ${{ steps.download.outputs.download-path }} + packages-dir: ${{ steps.download.outputs.download-path }} From 8a19d05538e4b7a399ec14730569d856997a2042 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 3 Feb 2024 12:30:39 +0000 Subject: [PATCH 454/632] remove comment --- .github/workflows/ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa110a8f..1f1aa755 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,7 +7,6 @@ on: pull_request: jobs: - # Run unittests test: runs-on: ${{ matrix.os }} strategy: From fa9300e3a3c823d7fb73680d1d98e668ce3e18ff Mon Sep 17 00:00:00 2001 From: Dimitris Iliopoulos Date: Thu, 15 Feb 2024 01:08:09 -0800 Subject: [PATCH 455/632] Upgrade pyo3 to 0.20 (#1106) Co-authored-by: Dimitris Iliopoulos --- native/Cargo.lock | 52 ++++++++++++++++++++----------- native/libcst/Cargo.toml | 2 +- native/libcst/src/nodes/traits.rs | 4 +-- 3 files changed, 37 insertions(+), 21 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 2d3899cd..0ae24307 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -201,7 +201,7 @@ dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset", + "memoffset 0.6.5", "once_cell", "scopeguard", ] @@ -261,6 +261,12 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + [[package]] name = "hermit-abi" version = "0.1.19" @@ -278,9 +284,9 @@ checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "indoc" -version = "1.0.6" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05a0bd019339e5d968b37855180087b7b9d512c5046fbd244cf8c95687927d6e" +checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" [[package]] name = "instant" @@ -414,6 +420,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + [[package]] name = "num-traits" version = "0.2.15" @@ -542,14 +557,14 @@ dependencies = [ [[package]] 
name = "pyo3" -version = "0.17.3" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543" +checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0" dependencies = [ "cfg-if", "indoc", "libc", - "memoffset", + "memoffset 0.9.0", "parking_lot", "pyo3-build-config", "pyo3-ffi", @@ -559,9 +574,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.17.3" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8" +checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be" dependencies = [ "once_cell", "target-lexicon", @@ -569,9 +584,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.17.3" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc" +checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1" dependencies = [ "libc", "pyo3-build-config", @@ -579,25 +594,26 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.17.3" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28" +checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 1.0.109", + "syn 2.0.41", ] [[package]] name = "pyo3-macros-backend" -version = "0.17.3" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f" +checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f" dependencies = [ + "heck", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.41", ] [[package]] @@ -839,9 +855,9 @@ checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" [[package]] name = "unindent" -version = "0.1.9" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52fee519a3e570f7df377a06a1a7775cdbfb7aa460be7e08de2b1f0e69973a44" +checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" [[package]] name = "walkdir" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index e96d4bc4..737f71b9 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.9" -pyo3 = { version = ">=0.17,<0.20", optional = true } +pyo3 = { version = "0.20", optional = true } thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index 1efa29fc..1dd58132 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -118,7 +118,7 @@ impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { } #[cfg(feature = "py")] pub mod py { - use pyo3::{types::PyTuple, AsPyPointer, IntoPy, PyObject, PyResult, Python}; + use pyo3::{types::PyAny, types::PyTuple, IntoPy, PyObject, PyResult, Python}; // TODO: replace with upstream implementation once // https://github.com/PyO3/pyo3/issues/1813 is resolved @@ -188,7 +188,7 @@ pub mod py { impl TryIntoPy for &'_ T where - T: AsPyPointer, + T: AsRef, { fn try_into_py(self, py: Python) -> PyResult { Ok(self.into_py(py)) From 
627bb0c4ab4ec875d5b683db4825a321b9c9072d Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 19 Feb 2024 12:04:43 +0000 Subject: [PATCH 456/632] bump version to 1.2.0 (#1110) --- CHANGELOG.md | 27 +++++++++++++++++++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 32 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e99fccf..228e119d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,30 @@ +# 1.2.0 - 2024-02-19 + +## Updated +* Support running LibCST on Python 3.12 and drop support for running it on 3.8 + * remove 3.8 support by @zsol in https://github.com/Instagram/LibCST/pull/1073 + * Remove reference to distutils by @zsol in https://github.com/Instagram/LibCST/pull/1099 + * Update pyproject.toml for Python 3.12 support by @itamaro in https://github.com/Instagram/LibCST/pull/1038 + +## Added +* Allow `Element::codegen` to be used by external users by @Wilfred in https://github.com/Instagram/LibCST/pull/1071 + +## Fixed +* Fix parsing list matchers without explicit brackets by @zsol in https://github.com/Instagram/LibCST/pull/1097 +* installing rustc/cargo for mybinder demo by @aleivag in https://github.com/Instagram/LibCST/pull/1083 +* fix filepathprovider generic type by @kinto0 in https://github.com/Instagram/LibCST/pull/1036 + +## New Contributors +* @itamaro made their first contribution in https://github.com/Instagram/LibCST/pull/1039 +* @kinto0 made their first contribution in https://github.com/Instagram/LibCST/pull/1036 +* @dtolnay made their first contribution in https://github.com/Instagram/LibCST/pull/1063 +* @anonymousdouble made their first contribution in https://github.com/Instagram/LibCST/pull/1082 +* @aleivag made their first contribution in https://github.com/Instagram/LibCST/pull/1083 +* @Wilfred made their first contribution in https://github.com/Instagram/LibCST/pull/1071 +* @diliop made their first contribution in https://github.com/Instagram/LibCST/pull/1106 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.1.0...v1.2.0 + # 1.1.0 - 2023-10-05 ## Added diff --git a/native/Cargo.lock b/native/Cargo.lock index 0ae24307..4fb1b7b6 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -355,7 +355,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.1.0" +version = "1.2.0" dependencies = [ "chic", "criterion", @@ -373,7 +373,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.1.0" +version = "1.2.0" dependencies = [ "quote", "syn 2.0.41", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 737f71b9..9451f161 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.1.0" +version = "1.2.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" memchr = "2.5.0" -libcst_derive = { path = "../libcst_derive", version = "1.1.0" } +libcst_derive = { path = "../libcst_derive", version = "1.2.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 5d7064e9..06903a0a 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.1.0" +version = "1.2.0" edition = "2018" description = "Proc macro 
helpers for libcst." license-file = "LICENSE" From f6493dbe8dacd05b4582921facbd903869b42ddd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 21:11:33 +0000 Subject: [PATCH 457/632] Bump ufmt from 2.3.0 to 2.5.1 (#1114) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.3.0 to 2.5.1. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.3.0...v2.5.1) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c7ffee7a..0404bd3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ dev = [ "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.3.0", + "ufmt==2.5.1", "usort==1.0.7", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From 36e791ebe5f008af91a2ccc6be4900e69fad190d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 17:39:10 -0800 Subject: [PATCH 458/632] Bump usort from 1.0.7 to 1.0.8.post1 (#1109) Bumps [usort](https://github.com/facebook/usort) from 1.0.7 to 1.0.8.post1. - [Changelog](https://github.com/facebook/usort/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/usort/compare/v1.0.7...v1.0.8.post1) --- updated-dependencies: - dependency-name: usort dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0404bd3a..f8a2cfab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ dev = [ "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", "ufmt==2.5.1", - "usort==1.0.7", + "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", "jinja2==3.1.3", From 5a50be26f16f9b2366459fb4f41b53fe390d1711 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 16:57:57 -0700 Subject: [PATCH 459/632] Bump pypa/cibuildwheel from 2.16.5 to 2.17.0 (#1119) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.5 to 2.17.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.5...v2.17.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 14ed9d67..cd8ae915 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.17.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From ffdea4d15770d47f90121f93e5ae69c4adfed723 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 21 Mar 2024 17:11:45 -0700 Subject: [PATCH 460/632] Build native mac arm64 wheels (#1121) --- .github/workflows/build.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index cd8ae915..4ea76c45 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,7 +10,8 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-latest, ubuntu-latest, windows-latest] + # macos-13 is an intel runner, macos-14 is apple silicon + os: [macos-13, macos-14, ubuntu-latest, windows-latest] env: SCCACHE_VERSION: 0.2.13 CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" From a35a05f056b259012018d5fc0859c3c7f97321ad Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 22 Mar 2024 09:05:20 -0700 Subject: [PATCH 461/632] ci: only build host-native wheels for macos --- .github/workflows/build.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4ea76c45..f3ed96ae 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -22,7 +22,6 @@ jobs: CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" CIBW_ARCHS_LINUX: auto aarch64 - CIBW_ARCHS_MACOS: x86_64 arm64 CIBW_BUILD_VERBOSITY: 1 steps: - uses: actions/checkout@v4 From 2ffca1084507fcee6d59ef22d394c92991bede3a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 3 Apr 2024 19:50:14 +0100 Subject: [PATCH 462/632] remove typing dependencies (#1126) Summary: This PR removes the `typing_extensions` and `typing_inspect` dependencies as we can now rely on the built-in `typing` module since Python 3.9. Test Plan: existing tests --- libcst/_exceptions.py | 4 +-- libcst/_nodes/expression.py | 4 +-- libcst/_type_enforce.py | 26 +++++++++---------- libcst/codegen/gen_matcher_classes.py | 3 +-- .../codemod/commands/convert_type_comments.py | 8 +++--- libcst/matchers/__init__.py | 4 +-- libcst/tests/test_type_enforce.py | 3 +-- pyproject.toml | 6 +---- 8 files changed, 22 insertions(+), 36 deletions(-) diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index 0ba689c2..5359ca3c 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -4,9 +4,7 @@ # LICENSE file in the root directory of this source tree. 
from enum import auto, Enum -from typing import Any, Callable, Iterable, Optional, Sequence, Tuple, Union - -from typing_extensions import final +from typing import Any, Callable, final, Iterable, Optional, Sequence, Tuple, Union from libcst._parser.parso.pgen2.generator import ReservedString from libcst._parser.parso.python.token import PythonTokenTypes, TokenType diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 074fc71f..75f7da13 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -15,9 +15,7 @@ from tokenize import ( Imagnumber as IMAGNUMBER_RE, Intnumber as INTNUMBER_RE, ) -from typing import Callable, Generator, Optional, Sequence, Union - -from typing_extensions import Literal +from typing import Callable, Generator, Literal, Optional, Sequence, Union from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel diff --git a/libcst/_type_enforce.py b/libcst/_type_enforce.py index b13c41de..dded4525 100644 --- a/libcst/_type_enforce.py +++ b/libcst/_type_enforce.py @@ -5,17 +5,20 @@ from typing import ( Any, + ClassVar, ForwardRef, + get_args, + get_origin, Iterable, + Literal, Mapping, MutableMapping, MutableSequence, Tuple, + TypeVar, + Union, ) -from typing_extensions import Literal -from typing_inspect import get_args, get_origin, is_classvar, is_typevar, is_union_type - def is_value_of_type( # noqa: C901 "too complex" # pyre-fixme[2]: Parameter annotation cannot be `Any`. @@ -48,11 +51,11 @@ def is_value_of_type( # noqa: C901 "too complex" - Forward Refs -- use `typing.get_type_hints` to resolve these - Type[...] """ - if is_classvar(expected_type): + if expected_type is ClassVar or get_origin(expected_type) is ClassVar: classvar_args = get_args(expected_type) expected_type = (classvar_args[0] or Any) if classvar_args else Any - if is_typevar(expected_type): + if type(expected_type) is TypeVar: # treat this the same as Any # TODO: evaluate bounds return True @@ -62,13 +65,13 @@ def is_value_of_type( # noqa: C901 "too complex" if expected_origin_type == Any: return True - elif is_union_type(expected_type): + elif expected_type is Union or get_origin(expected_type) is Union: return any( is_value_of_type(value, subtype) for subtype in expected_type.__args__ ) elif isinstance(expected_origin_type, type(Literal)): - literal_values = get_args(expected_type, evaluate=True) + literal_values = get_args(expected_type) return any(value == literal for literal in literal_values) elif isinstance(expected_origin_type, ForwardRef): @@ -82,14 +85,11 @@ def is_value_of_type( # noqa: C901 "too complex" if not isinstance(value, tuple): return False - type_args = get_args(expected_type, evaluate=True) + type_args = get_args(expected_type) if len(type_args) == 0: # `Tuple` (no subscript) is implicitly `Tuple[Any, ...]` return True - if type_args is None: - return True - if len(value) != len(type_args): return False # TODO: Handle `Tuple[T, ...]` like `Iterable[T]` @@ -106,7 +106,7 @@ def is_value_of_type( # noqa: C901 "too complex" if not issubclass(type(value), expected_origin_type): return False - type_args = get_args(expected_type, evaluate=True) + type_args = get_args(expected_type) if len(type_args) == 0: # `Mapping` (no subscript) is implicitly `Mapping[Any, Any]`. 
return True @@ -143,7 +143,7 @@ def is_value_of_type( # noqa: C901 "too complex" if not issubclass(type(value), expected_origin_type): return False - type_args = get_args(expected_type, evaluate=True) + type_args = get_args(expected_type) if len(type_args) == 0: # `Iterable` (no subscript) is implicitly `Iterable[Any]`. return True diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 8ac8a466..b7940f97 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -443,8 +443,7 @@ generated_code.append("") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from dataclasses import dataclass") -generated_code.append("from typing import Optional, Sequence, Union") -generated_code.append("from typing_extensions import Literal") +generated_code.append("from typing import Literal, Optional, Sequence, Union") generated_code.append("import libcst as cst") generated_code.append("") generated_code.append( diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index e2c6e71c..5863d94b 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -11,8 +11,6 @@ import functools import sys from typing import cast, Dict, List, Optional, Sequence, Set, Tuple, Union -from typing_extensions import TypeAlias - import libcst as cst import libcst.matchers as m from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand @@ -143,9 +141,9 @@ class _ArityError(Exception): pass -UnpackedBindings: TypeAlias = Union[cst.BaseExpression, List["UnpackedBindings"]] -UnpackedAnnotations: TypeAlias = Union[str, List["UnpackedAnnotations"]] -TargetAnnotationPair: TypeAlias = Tuple[cst.BaseExpression, str] +UnpackedBindings = Union[cst.BaseExpression, List["UnpackedBindings"]] +UnpackedAnnotations = Union[str, List["UnpackedAnnotations"]] +TargetAnnotationPair = Tuple[cst.BaseExpression, str] class AnnotationSpreader: diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 7e3761b8..e9698462 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -6,9 +6,7 @@ # This file was generated by libcst.codegen.gen_matcher_classes from dataclasses import dataclass -from typing import Optional, Sequence, Union - -from typing_extensions import Literal +from typing import Literal, Optional, Sequence, Union import libcst as cst from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index 7c01b82b..f6fecc7d 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -11,6 +11,7 @@ from typing import ( Dict, Iterable, List, + Literal, Mapping, MutableMapping, NamedTuple, @@ -23,8 +24,6 @@ from typing import ( Union, ) -from typing_extensions import Literal - from libcst._type_enforce import is_value_of_type from libcst.testing.utils import data_provider, UnitTest diff --git a/pyproject.toml b/pyproject.toml index f8a2cfab..d2481a1f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,11 +16,7 @@ classifiers = [ "Programming Language :: Python :: 3.12", ] requires-python = ">=3.9" -dependencies = [ - "typing_extensions>=3.7.4.2", - "typing_inspect>=0.4.0", - "pyyaml>=5.2", -] +dependencies = ["pyyaml>=5.2"] [project.optional-dependencies] dev = [ From 
8b334740015d248e0ee90072b5d64a4cd7cd6e7b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 3 Apr 2024 19:54:30 +0100 Subject: [PATCH 463/632] bump version to 1.3.0 Summary: Test Plan: --- CHANGELOG.md | 5 +++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 228e119d..a3f41d8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +# 1.3.0 - 2024-04-03 + +## Updated +* Removed dependencies on `typing_extensions` and `typing_inspect` by @zsol in https://github.com/Instagram/LibCST/pull/1126 + # 1.2.0 - 2024-02-19 ## Updated diff --git a/native/Cargo.lock b/native/Cargo.lock index 4fb1b7b6..a300b587 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -355,7 +355,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.2.0" +version = "1.3.0" dependencies = [ "chic", "criterion", @@ -373,7 +373,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.2.0" +version = "1.3.0" dependencies = [ "quote", "syn 2.0.41", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 9451f161..ecaffd4a 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.2.0" +version = "1.3.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" memchr = "2.5.0" -libcst_derive = { path = "../libcst_derive", version = "1.2.0" } +libcst_derive = { path = "../libcst_derive", version = "1.3.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 06903a0a..678cadea 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.2.0" +version = "1.3.0" edition = "2018" description = "Proc macro helpers for libcst." 
license-file = "LICENSE" From 4fb66a33e6d454622bdb82bdb7947ee576820ef0 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 3 Apr 2024 21:10:44 +0100 Subject: [PATCH 464/632] remove mypy_extensions import (#1128) --- libcst/metadata/type_inference_provider.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 06ed0c19..c9c1fc9a 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -6,9 +6,7 @@ import json import subprocess from pathlib import Path -from typing import Dict, List, Mapping, Optional, Sequence, Tuple - -from mypy_extensions import TypedDict +from typing import Dict, List, Mapping, Optional, Sequence, Tuple, TypedDict import libcst as cst from libcst._position import CodePosition, CodeRange From 9f54920d9d32a519aa1702485ed56c55d8189a0c Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 3 Apr 2024 21:13:08 +0100 Subject: [PATCH 465/632] bump version to 1.3.1 --- CHANGELOG.md | 5 +++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3f41d8b..6f5407c0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +# 1.3.1 - 2024-04-03 + +## Fixed +* ImportError due to missing `mypy_extensions` dependency by @zsol in https://github.com/Instagram/LibCST/pull/1128 + # 1.3.0 - 2024-04-03 ## Updated diff --git a/native/Cargo.lock b/native/Cargo.lock index a300b587..77390083 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -355,7 +355,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.3.0" +version = "1.3.1" dependencies = [ "chic", "criterion", @@ -373,7 +373,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.3.0" +version = "1.3.1" dependencies = [ "quote", "syn 2.0.41", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index ecaffd4a..a0d92de3 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.3.0" +version = "1.3.1" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" memchr = "2.5.0" -libcst_derive = { path = "../libcst_derive", version = "1.3.0" } +libcst_derive = { path = "../libcst_derive", version = "1.3.1" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 678cadea..127c8378 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.3.0" +version = "1.3.1" edition = "2018" description = "Proc macro helpers for libcst." license-file = "LICENSE" From 0d087acdf6ae38c2fd96d942123d8e53731bf7b8 Mon Sep 17 00:00:00 2001 From: Sergii Dymchenko Date: Fri, 3 May 2024 14:25:37 -0700 Subject: [PATCH 466/632] Typo fix FullRepoManager (#1138) --- docs/source/metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index b0b12800..e7e8bdc3 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -243,7 +243,7 @@ In Python, type checkers like `Mypy `_ or and infer types for expressions. 
:class:`~libcst.metadata.TypeInferenceProvider` is provided by `Pyre Query API `__ which requires `setup watchman `_ for incremental typechecking. -:class:`~libcst.metadata.FullRepoManger` is built for manage the inter process communication to Pyre. +:class:`~libcst.metadata.FullRepoManager` is built for manage the inter process communication to Pyre. .. autoclass:: libcst.metadata.TypeInferenceProvider :no-undoc-members: From e9dc135ae4335a6a913adab6dde7b8037958122f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebasti=C3=A1n=20Ram=C3=ADrez?= Date: Fri, 3 May 2024 14:27:20 -0700 Subject: [PATCH 467/632] Fix tiny typo in `docs/source/metadata.rst` (#1134) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✏️ Fix typo in metadata.rst * ✏️ Fix typo --- docs/source/metadata.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index e7e8bdc3..9e450c97 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -94,7 +94,7 @@ declaring one of :class:`~libcst.metadata.PositionProvider` or most cases, :class:`~libcst.metadata.PositionProvider` is what you probably want. -Node positions are is represented with :class:`~libcst.metadata.CodeRange` +Node positions are represented with :class:`~libcst.metadata.CodeRange` objects. See :ref:`the above example`. .. autoclass:: libcst.metadata.PositionProvider @@ -134,7 +134,7 @@ New scopes are created for classes, functions, and comprehensions. Other block constructs like conditional statements, loops, and try…except don't create their own scope. -There are five different type of scope in Python: +There are five different types of scopes in Python: :class:`~libcst.metadata.BuiltinScope`, :class:`~libcst.metadata.GlobalScope`, :class:`~libcst.metadata.ClassScope`, From 0713a3554842c677a5a168bcdb2601939b1535c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebasti=C3=A1n=20Ram=C3=ADrez?= Date: Fri, 3 May 2024 14:27:37 -0700 Subject: [PATCH 468/632] Fix typo in `docs/source/scope_tutorial.ipynb` (#1135) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✏️ Fix typo in `docs/source/scope_tutorial.ipynb` * ✏️ Fix another typo * ✏️ Fix typos * ✏️ Fix typos --- docs/source/scope_tutorial.ipynb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/source/scope_tutorial.ipynb b/docs/source/scope_tutorial.ipynb index e4d4393d..179e2ed7 100644 --- a/docs/source/scope_tutorial.ipynb +++ b/docs/source/scope_tutorial.ipynb @@ -90,7 +90,7 @@ "source": [ "Warn on unused imports and undefined references\n", "===============================================\n", - "To find all unused imports, we iterate through :attr:`~libcst.metadata.Scope.assignments` and an assignment is unused when its :attr:`~libcst.metadata.BaseAssignment.references` is empty. To find all undefined references, we iterate through :attr:`~libcst.metadata.Scope.accesses` (we focus on :class:`~libcst.Import`/:class:`~libcst.ImportFrom` assignments) and an access is undefined reference when its :attr:`~libcst.metadata.Access.referents` is empty. When reporting the warning to developer, we'll want to report the line number and column offset along with the suggestion to make it more clear. 
We can get position information from :class:`~libcst.metadata.PositionProvider` and print the warnings as follows.\n" + "To find all unused imports, we iterate through :attr:`~libcst.metadata.Scope.assignments` and an assignment is unused when its :attr:`~libcst.metadata.BaseAssignment.references` is empty. To find all undefined references, we iterate through :attr:`~libcst.metadata.Scope.accesses` (we focus on :class:`~libcst.Import`/:class:`~libcst.ImportFrom` assignments) and an access is undefined reference when its :attr:`~libcst.metadata.Access.referents` is empty. When reporting the warning to the developer, we'll want to report the line number and column offset along with the suggestion to make it more clear. We can get position information from :class:`~libcst.metadata.PositionProvider` and print the warnings as follows.\n" ] }, { @@ -136,13 +136,13 @@ "Automatically Remove Unused Import\n", "==================================\n", "Unused import is a commmon code suggestion provided by lint tool like `flake8 F401 `_ ``imported but unused``.\n", - "Even though reporting unused import is already useful, with LibCST we can provide automatic fix to remove unused import. That can make the suggestion more actionable and save developer's time.\n", + "Even though reporting unused imports is already useful, with LibCST we can provide an automatic fix to remove unused imports. That can make the suggestion more actionable and save developer's time.\n", "\n", "An import statement may import multiple names, we want to remove those unused names from the import statement. If all the names in the import statement are not used, we remove the entire import.\n", "To remove the unused name, we implement ``RemoveUnusedImportTransformer`` by subclassing :class:`~libcst.CSTTransformer`. We overwrite ``leave_Import`` and ``leave_ImportFrom`` to modify the import statements.\n", - "When we find the import node in lookup table, we iterate through all ``names`` and keep used names in ``names_to_keep``.\n", + "When we find the import node in the lookup table, we iterate through all ``names`` and keep used names in ``names_to_keep``.\n", "If ``names_to_keep`` is empty, all names are unused and we remove the entire import node.\n", - "Otherwise, we update the import node and just removing partial names." + "Otherwise, we update the import node and just remove partial names." ] }, { @@ -195,7 +195,7 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ - "After the transform, we use ``.code`` to generate fixed code and all unused names are fixed as expected! The difflib is used to show only changed part and only import lines are updated as expected." + "After the transform, we use ``.code`` to generate the fixed code and all unused names are fixed as expected! The difflib is used to show only the changed part and only imported lines are updated as expected." 
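A rough, self-contained sketch of the unused-import check the tutorial cell above describes (not part of the patch; the sample source string is made up), using the public `ScopeProvider` and `PositionProvider` APIs:

```
import libcst as cst
from libcst.metadata import Assignment, MetadataWrapper, PositionProvider, ScopeProvider

# Made-up example input: `os` is imported but never used.
source = "import os\nimport sys\n\nprint(sys.argv)\n"

wrapper = MetadataWrapper(cst.parse_module(source))
scopes = set(wrapper.resolve(ScopeProvider).values())
positions = wrapper.resolve(PositionProvider)

for scope in scopes:
    if scope is None:
        continue
    for assignment in scope.assignments:
        node = assignment.node
        if (
            isinstance(assignment, Assignment)
            and isinstance(node, (cst.Import, cst.ImportFrom))
            and len(assignment.references) == 0
        ):
            loc = positions[node].start
            print(f"{loc.line}:{loc.column}: unused import {assignment.name!r}")
```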
] }, { From 82f804a66a88c5a9e8f96f39b4dde5afeb9b99fb Mon Sep 17 00:00:00 2001 From: Camillo Date: Fri, 3 May 2024 14:36:20 -0700 Subject: [PATCH 469/632] Fix Literal parse error in RemoveImportsVisitor (#1130) --- .../visitors/_gather_string_annotation_names.py | 11 ++++++++++- .../tests/test_gather_string_annotation_names.py | 11 +++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py index 0f1b926b..b7268ffc 100644 --- a/libcst/codemod/visitors/_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -44,6 +44,11 @@ class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): def leave_Annotation(self, original_node: cst.Annotation) -> None: self._annotation_stack.pop() + def visit_Subscript(self, node: cst.Subscript) -> bool: + qnames = self.get_metadata(QualifiedNameProvider, node) + # A Literal["foo"] should not be interpreted as a use of the symbol "foo". + return not any(qn.name == "typing.Literal" for qn in qnames) + def visit_Call(self, node: cst.Call) -> bool: qnames = self.get_metadata(QualifiedNameProvider, node) if any(qn.name in self._typing_functions for qn in qnames): @@ -71,7 +76,11 @@ class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): value = node.evaluated_value if value is None: return - mod = cst.parse_module(value) + try: + mod = cst.parse_module(value) + except cst.ParserSyntaxError: + # Not all strings inside a type annotation are meant to be valid Python code. + return extracted_nodes = m.extractall( mod, m.Name( diff --git a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py index f8a11fcc..d3c622a3 100644 --- a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py @@ -80,3 +80,14 @@ class TestGatherNamesFromStringAnnotationsVisitor(UnitTest): visitor.names, {"api", "api.http_exceptions", "api.http_exceptions.HttpException"}, ) + + def test_literals(self) -> None: + visitor = self.gather_names( + """ + from typing import Literal + a: Literal["in"] + b: list[Literal["1x"]] + c: Literal["Any"] + """ + ) + self.assertEqual(visitor.names, set()) From 18a863741eb052a8929df43e0e0027b2007b9184 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 3 May 2024 22:38:42 +0100 Subject: [PATCH 470/632] Update maturin requirement from <1.5,>=0.8.3 to >=0.8.3,<1.6 (#1117) Updates the requirements on [maturin](https://github.com/pyo3/maturin) to permit the latest version. - [Release notes](https://github.com/pyo3/maturin/releases) - [Changelog](https://github.com/PyO3/maturin/blob/main/Changelog.md) - [Commits](https://github.com/pyo3/maturin/compare/v0.8.3...v1.5.0) --- updated-dependencies: - dependency-name: maturin dependency-type: direct:production ... 
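Stepping back to the `Literal` parse fix from #1130 above, a hedged standalone sketch of roughly what the new `test_literals` case exercises; it assumes `GatherNamesFromStringAnnotationsVisitor` is re-exported from `libcst.codemod.visitors`, and the sample code string is made up:

```
import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.visitors import GatherNamesFromStringAnnotationsVisitor

code = (
    "from typing import Literal\n"
    'a: Literal["in"]\n'
    'b: list[Literal["1x"]]\n'
    'c: Literal["Any"]\n'
)

wrapper = cst.MetadataWrapper(cst.parse_module(code))
visitor = GatherNamesFromStringAnnotationsVisitor(CodemodContext(wrapper=wrapper))
wrapper.visit(visitor)

# With the fix, the Literal string values are not treated as symbol
# references, and strings like "1x" that are not valid Python no longer
# trigger a ParserSyntaxError.
print(visitor.names)  # expected: set()
```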
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d2481a1f..d794fd8d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dev = [ "hypothesis>=4.36.0", "hypothesmith>=0.0.4", "jupyter>=1.0.0", - "maturin>=0.8.3,<1.5", + "maturin>=0.8.3,<1.6", "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", "pyre-check==0.9.18; platform_system != 'Windows'", From a068f4bdd1af98149e634809ca9b0f53f8f45f99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 11:42:40 +0100 Subject: [PATCH 471/632] Bump jinja2 from 3.1.3 to 3.1.4 (#1140) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.3 to 3.1.4. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.3...3.1.4) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d794fd8d..bb35bdad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ dev = [ "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", - "jinja2==3.1.3", + "jinja2==3.1.4", ] [project.urls] From 20ed6c49c4e41f9a22dd2387acd833a4930dd5c2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 11:42:51 +0100 Subject: [PATCH 472/632] Bump ufmt from 2.5.1 to 2.6.0 (#1139) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.5.1 to 2.6.0. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.5.1...v2.6.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bb35bdad..260c8655 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ dev = [ "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.5.1", + "ufmt==2.6.0", "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From 942dc8007a543686fbe5af4ed8538c4bea28de51 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 18:02:33 +0100 Subject: [PATCH 473/632] Bump codecov/codecov-action from 3 to 4 (#1103) * Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] * set codecov token --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Zsolt Dollenstein --- .github/workflows/ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1f1aa755..ce61c654 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -89,7 +89,9 @@ jobs: run: | hatch run coverage run setup.py test hatch run coverage xml -i - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: files: coverage.xml fail_ci_if_error: true From e7b009655a9f9a2dcd059b125308bc8ac022401b Mon Sep 17 00:00:00 2001 From: zaicruvoir1rominet <105832258+zaicruvoir1rominet@users.noreply.github.com> Date: Sun, 12 May 2024 21:40:07 +0200 Subject: [PATCH 474/632] Update CONTRIBUTING.md (#1142) * Update CONTRIBUTING.md * Fix repo link * Fix line break getting removed --- CONTRIBUTING.md | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9af09650..477dbdcd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,12 +9,30 @@ pull requests. ## Pull Requests We actively welcome your pull requests. -1. Fork the repo and create your branch from `main`. -2. If you've added code that should be tested, add tests. -3. If you've changed APIs, update the documentation. -4. Ensure the test suite passes by `python -m unittest`. -5. Make sure your code lints. -6. If you haven't already, complete the Contributor License Agreement ("CLA"). +### Setup Your Environment + +1. Install a [Rust toolchain](https://rustup.rs) and [hatch](https://hatch.pypa.io) +2. Fork the repo on your side +3. Clone the repo + > git clone [your fork.git] libcst + > cd libcst +4. Sync with the main libcst version package + > git fetch --tags https://github.com/instagram/libcst +5. Setup the env + > hatch env create + +You are now ready to create your own branch from main, and contribute. +Please provide tests (using unittest), and update the documentation (both docstrings +and sphinx doc), if applicable. + +### Before Submitting Your Pull Request + +1. Format your code + > hatch run format +2. Test your changes + > hatch run test +3. Check linters + > hatch run lint ## Contributor License Agreement ("CLA") In order to accept your pull request, we need you to submit a CLA. You only need From 6783244eab6f38bf456cd60090201c31c5ec9357 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 13 May 2024 09:47:28 +0100 Subject: [PATCH 475/632] Add typechecker to CONTRIBUTING.md --- CONTRIBUTING.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 477dbdcd..c69049cd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -29,9 +29,11 @@ and sphinx doc), if applicable. 1. Format your code > hatch run format -2. Test your changes +2. Run the type checker + > hatch run typecheck +3. Test your changes > hatch run test -3. Check linters +4. 
Check linters > hatch run lint ## Contributor License Agreement ("CLA") From efc53af60885853bfbd87564fa01ac25fcd2b94e Mon Sep 17 00:00:00 2001 From: zaicruvoir1rominet <105832258+zaicruvoir1rominet@users.noreply.github.com> Date: Mon, 13 May 2024 11:20:47 +0200 Subject: [PATCH 476/632] Add helper functions for common ways of filtering nodes (#1137) * Make the nodes fields filtering process - from libcst.tool - public, so that other libraries may provide their own custom representation of LibCST graphs. * Create functions to access & filter CST-node fields (with appropriate docstrings & tests), in libcst.helpers * Add new CST-node fields functions to helpers documentation. --- docs/source/helpers.rst | 15 ++ libcst/helpers/__init__.py | 14 + libcst/helpers/node_fields.py | 128 +++++++++ libcst/helpers/tests/test_node_fields.py | 314 +++++++++++++++++++++++ libcst/tool.py | 91 +------ 5 files changed, 482 insertions(+), 80 deletions(-) create mode 100644 libcst/helpers/node_fields.py create mode 100644 libcst/helpers/tests/test_node_fields.py diff --git a/docs/source/helpers.rst b/docs/source/helpers.rst index e4b94d2b..3cf5abfb 100644 --- a/docs/source/helpers.rst +++ b/docs/source/helpers.rst @@ -32,3 +32,18 @@ Functions that assist in traversing an existing LibCST tree. .. autofunction:: libcst.helpers.get_full_name_for_node .. autofunction:: libcst.helpers.get_full_name_for_node_or_raise .. autofunction:: libcst.helpers.ensure_type + +Node fields filtering Helpers +----------------------------- + +Function that assist when handling CST nodes' fields. + +.. autofunction:: libcst.helpers.filter_node_fields + +And lower level functions: + +.. autofunction:: libcst.helpers.get_node_fields +.. autofunction:: libcst.helpers.is_whitespace_node_field +.. autofunction:: libcst.helpers.is_syntax_node_field +.. autofunction:: libcst.helpers.is_default_node_field +.. autofunction:: libcst.helpers.get_field_default_value diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index c7fdf9b1..817acc39 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -25,6 +25,14 @@ from libcst.helpers.module import ( insert_header_comments, ModuleNameAndPackage, ) +from libcst.helpers.node_fields import ( + filter_node_fields, + get_field_default_value, + get_node_fields, + is_default_node_field, + is_syntax_node_field, + is_whitespace_node_field, +) __all__ = [ "calculate_module_and_package", @@ -42,4 +50,10 @@ __all__ = [ "parse_template_statement", "parse_template_expression", "ModuleNameAndPackage", + "get_node_fields", + "get_field_default_value", + "is_whitespace_node_field", + "is_syntax_node_field", + "is_default_node_field", + "filter_node_fields", ] diff --git a/libcst/helpers/node_fields.py b/libcst/helpers/node_fields.py new file mode 100644 index 00000000..418d6cbb --- /dev/null +++ b/libcst/helpers/node_fields.py @@ -0,0 +1,128 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from __future__ import annotations + +import dataclasses +from typing import TYPE_CHECKING + +from libcst import IndentedBlock, Module +from libcst._nodes.deep_equals import deep_equals + +if TYPE_CHECKING: + from typing import Sequence + + from libcst import CSTNode + + +def get_node_fields(node: CSTNode) -> Sequence[dataclasses.Field[CSTNode]]: + """ + Returns the sequence of a given CST-node's fields. 
+ """ + return dataclasses.fields(node) + + +def is_whitespace_node_field(node: CSTNode, field: dataclasses.Field[CSTNode]) -> bool: + """ + Returns True if a given CST-node's field is a whitespace-related field + (whitespace, indent, header, footer, etc.). + """ + if "whitespace" in field.name: + return True + if "leading_lines" in field.name: + return True + if "lines_after_decorators" in field.name: + return True + if isinstance(node, (IndentedBlock, Module)) and field.name in [ + "header", + "footer", + ]: + return True + if isinstance(node, IndentedBlock) and field.name == "indent": + return True + return False + + +def is_syntax_node_field(node: CSTNode, field: dataclasses.Field[CSTNode]) -> bool: + """ + Returns True if a given CST-node's field is a syntax-related field + (colon, semicolon, dot, encoding, etc.). + """ + if isinstance(node, Module) and field.name in [ + "encoding", + "default_indent", + "default_newline", + "has_trailing_newline", + ]: + return True + type_str = repr(field.type) + if ( + "Sentinel" in type_str + and field.name not in ["star_arg", "star", "posonly_ind"] + and "whitespace" not in field.name + ): + # This is a value that can optionally be specified, so its + # definitely syntax. + return True + + for name in ["Semicolon", "Colon", "Comma", "Dot", "AssignEqual"]: + # These are all nodes that exist for separation syntax + if name in type_str: + return True + + return False + + +def get_field_default_value(field: dataclasses.Field[CSTNode]) -> object: + """ + Returns the default value of a CST-node's field. + """ + if field.default_factory is not dataclasses.MISSING: + # pyre-fixme[29]: `Union[dataclasses._MISSING_TYPE, + # dataclasses._DefaultFactory[object]]` is not a function. + return field.default_factory() + return field.default + + +def is_default_node_field(node: CSTNode, field: dataclasses.Field[CSTNode]) -> bool: + """ + Returns True if a given CST-node's field has its default value. + """ + return deep_equals(getattr(node, field.name), get_field_default_value(field)) + + +def filter_node_fields( + node: CSTNode, + *, + show_defaults: bool, + show_syntax: bool, + show_whitespace: bool, +) -> Sequence[dataclasses.Field[CSTNode]]: + """ + Returns a filtered sequence of a CST-node's fields. + + Setting ``show_whitespace`` to ``False`` will filter whitespace fields. + + Setting ``show_defaults`` to ``False`` will filter fields if their value is equal to + the default value ; while respecting the value of ``show_whitespace``. + + Setting ``show_syntax`` to ``False`` will filter syntax fields ; while respecting + the value of ``show_whitespace`` & ``show_defaults``. + """ + + fields: Sequence[dataclasses.Field[CSTNode]] = dataclasses.fields(node) + # Hide all fields prefixed with "_" + fields = [f for f in fields if f.name[0] != "_"] + # Filter whitespace nodes if needed + if not show_whitespace: + fields = [f for f in fields if not is_whitespace_node_field(node, f)] + # Filter values which aren't changed from their defaults + if not show_defaults: + fields = [f for f in fields if not is_default_node_field(node, f)] + # Filter out values which aren't interesting if needed + if not show_syntax: + fields = [f for f in fields if not is_syntax_node_field(node, f)] + + return fields diff --git a/libcst/helpers/tests/test_node_fields.py b/libcst/helpers/tests/test_node_fields.py new file mode 100644 index 00000000..61d5ec21 --- /dev/null +++ b/libcst/helpers/tests/test_node_fields.py @@ -0,0 +1,314 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from unittest import TestCase + +from libcst import ( + Annotation, + CSTNode, + FunctionDef, + IndentedBlock, + Module, + Param, + parse_module, + Pass, + Semicolon, + SimpleStatementLine, +) + +from libcst.helpers import ( + get_node_fields, + is_default_node_field, + is_syntax_node_field, + is_whitespace_node_field, +) + + +class _NodeFieldsTest(TestCase): + """Node fields related tests.""" + + module: Module + annotation: Annotation + param: Param + _pass: Pass + semicolon: Semicolon + statement: SimpleStatementLine + indent: IndentedBlock + function: FunctionDef + + @classmethod + def setUpClass(cls) -> None: + """Parse a simple CST and references interesting nodes.""" + cls.module = parse_module( + "def foo(a: str) -> None:\n pass ; pass\n return\n" + ) + # /!\ Direct access to nodes + # This is done for test purposes on a known CST + # -> For "real code", use visitors to do this "the correct way" + + # pyre-ignore[8]: direct access for tests + cls.function = cls.module.body[0] + cls.param = cls.function.params.params[0] + # pyre-ignore[8]: direct access for tests + cls.annotation = cls.param.annotation + # pyre-ignore[8]: direct access for tests + cls.indent = cls.function.body + # pyre-ignore[8]: direct access for tests + cls.statement = cls.indent.body[0] + # pyre-ignore[8]: direct access for tests + cls._pass = cls.statement.body[0] + # pyre-ignore[8]: direct access for tests + cls.semicolon = cls.statement.body[0].semicolon + + def test__cst_correctness(self) -> None: + """Test that the CST is correctly parsed.""" + self.assertIsInstance(self.module, Module) + self.assertIsInstance(self.annotation, Annotation) + self.assertIsInstance(self.param, Param) + self.assertIsInstance(self._pass, Pass) + self.assertIsInstance(self.semicolon, Semicolon) + self.assertIsInstance(self.statement, SimpleStatementLine) + self.assertIsInstance(self.indent, IndentedBlock) + self.assertIsInstance(self.function, FunctionDef) + + +class IsWhitespaceNodeFieldTest(_NodeFieldsTest): + """``is_whitespace_node_field`` tests.""" + + def _check_fields(self, is_filtered_field: dict[str, bool], node: CSTNode) -> None: + fields = get_node_fields(node) + self.assertEqual(len(is_filtered_field), len(fields)) + for field in fields: + self.assertEqual( + is_filtered_field[field.name], + is_whitespace_node_field(node, field), + f"Node ``{node.__class__.__qualname__}`` field '{field.name}' " + f"{'should have' if is_filtered_field[field.name] else 'should not have'} " + "been filtered by ``is_whitespace_node_field``", + ) + + def test_module(self) -> None: + """Check if a CST Module node is correctly filtered.""" + is_filtered_field = { + "body": False, + "header": True, + "footer": True, + "encoding": False, + "default_indent": False, + "default_newline": False, + "has_trailing_newline": False, + } + self._check_fields(is_filtered_field, self.module) + + def test_annotation(self) -> None: + """Check if a CST Annotation node is correctly filtered.""" + is_filtered_field = { + "annotation": False, + "whitespace_before_indicator": True, + "whitespace_after_indicator": True, + } + self._check_fields(is_filtered_field, self.annotation) + + def test_param(self) -> None: + """Check if a CST Param node is correctly filtered.""" + is_filtered_field = { + "name": False, + "annotation": False, + "equal": False, + "default": False, + "comma": False, + "star": False, + "whitespace_after_star": 
True, + "whitespace_after_param": True, + } + self._check_fields(is_filtered_field, self.param) + + def test_semicolon(self) -> None: + """Check if a CST Semicolon node is correctly filtered.""" + is_filtered_field = { + "whitespace_before": True, + "whitespace_after": True, + } + self._check_fields(is_filtered_field, self.semicolon) + + def test_statement(self) -> None: + """Check if a CST SimpleStatementLine node is correctly filtered.""" + is_filtered_field = { + "body": False, + "leading_lines": True, + "trailing_whitespace": True, + } + self._check_fields(is_filtered_field, self.statement) + + def test_indent(self) -> None: + """Check if a CST IndentedBlock node is correctly filtered.""" + is_filtered_field = { + "body": False, + "header": True, + "indent": True, + "footer": True, + } + self._check_fields(is_filtered_field, self.indent) + + def test_function(self) -> None: + """Check if a CST FunctionDef node is correctly filtered.""" + is_filtered_field = { + "name": False, + "params": False, + "body": False, + "decorators": False, + "returns": False, + "asynchronous": False, + "leading_lines": True, + "lines_after_decorators": True, + "whitespace_after_def": True, + "whitespace_after_name": True, + "whitespace_before_params": True, + "whitespace_before_colon": True, + "type_parameters": False, + "whitespace_after_type_parameters": True, + } + self._check_fields(is_filtered_field, self.function) + + +class IsSyntaxNodeFieldTest(_NodeFieldsTest): + """``is_syntax_node_field`` tests.""" + + def _check_fields(self, is_filtered_field: dict[str, bool], node: CSTNode) -> None: + fields = get_node_fields(node) + self.assertEqual(len(is_filtered_field), len(fields)) + for field in fields: + self.assertEqual( + is_filtered_field[field.name], + is_syntax_node_field(node, field), + f"Node ``{node.__class__.__qualname__}`` field '{field.name}' " + f"{'should have' if is_filtered_field[field.name] else 'should not have'} " + "been filtered by ``is_syntax_node_field``", + ) + + def test_module(self) -> None: + """Check if a CST Module node is correctly filtered.""" + is_filtered_field = { + "body": False, + "header": False, + "footer": False, + "encoding": True, + "default_indent": True, + "default_newline": True, + "has_trailing_newline": True, + } + self._check_fields(is_filtered_field, self.module) + + def test_param(self) -> None: + """Check if a CST Param node is correctly filtered.""" + is_filtered_field = { + "name": False, + "annotation": False, + "equal": True, + "default": False, + "comma": True, + "star": False, + "whitespace_after_star": False, + "whitespace_after_param": False, + } + self._check_fields(is_filtered_field, self.param) + + def test_pass(self) -> None: + """Check if a CST Pass node is correctly filtered.""" + is_filtered_field = { + "semicolon": True, + } + self._check_fields(is_filtered_field, self._pass) + + +class IsDefaultNodeFieldTest(_NodeFieldsTest): + """``is_default_node_field`` tests.""" + + def _check_fields(self, is_filtered_field: dict[str, bool], node: CSTNode) -> None: + fields = get_node_fields(node) + self.assertEqual(len(is_filtered_field), len(fields)) + for field in fields: + self.assertEqual( + is_filtered_field[field.name], + is_default_node_field(node, field), + f"Node ``{node.__class__.__qualname__}`` field '{field.name}' " + f"{'should have' if is_filtered_field[field.name] else 'should not have'} " + "been filtered by ``is_default_node_field``", + ) + + def test_module(self) -> None: + """Check if a CST Module node is correctly filtered.""" + 
is_filtered_field = { + "body": False, + "header": True, + "footer": True, + "encoding": True, + "default_indent": True, + "default_newline": True, + "has_trailing_newline": True, + } + self._check_fields(is_filtered_field, self.module) + + def test_annotation(self) -> None: + """Check if a CST Annotation node is correctly filtered.""" + is_filtered_field = { + "annotation": False, + "whitespace_before_indicator": False, + "whitespace_after_indicator": True, + } + self._check_fields(is_filtered_field, self.annotation) + + def test_param(self) -> None: + """Check if a CST Param node is correctly filtered.""" + is_filtered_field = { + "name": False, + "annotation": False, + "equal": True, + "default": True, + "comma": True, + "star": False, + "whitespace_after_star": True, + "whitespace_after_param": True, + } + self._check_fields(is_filtered_field, self.param) + + def test_statement(self) -> None: + """Check if a CST SimpleStatementLine node is correctly filtered.""" + is_filtered_field = { + "body": False, + "leading_lines": True, + "trailing_whitespace": True, + } + self._check_fields(is_filtered_field, self.statement) + + def test_indent(self) -> None: + """Check if a CST IndentedBlock node is correctly filtered.""" + is_filtered_field = { + "body": False, + "header": True, + "indent": True, + "footer": True, + } + self._check_fields(is_filtered_field, self.indent) + + def test_function(self) -> None: + """Check if a CST FunctionDef node is correctly filtered.""" + is_filtered_field = { + "name": False, + "params": False, + "body": False, + "decorators": True, + "returns": False, + "asynchronous": True, + "leading_lines": True, + "lines_after_decorators": True, + "whitespace_after_def": True, + "whitespace_after_name": True, + "whitespace_before_params": True, + "whitespace_before_colon": True, + "type_parameters": True, + "whitespace_after_type_parameters": True, + } + self._check_fields(is_filtered_field, self.function) diff --git a/libcst/tool.py b/libcst/tool.py index 5469ef27..85a977be 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -22,15 +22,7 @@ from typing import Any, Callable, Dict, List, Sequence, Tuple, Type import yaml -from libcst import ( - CSTNode, - IndentedBlock, - LIBCST_VERSION, - Module, - parse_module, - PartialParserConfig, -) -from libcst._nodes.deep_equals import deep_equals +from libcst import CSTNode, LIBCST_VERSION, parse_module, PartialParserConfig from libcst._parser.parso.utils import parse_version_string from libcst.codemod import ( CodemodCommand, @@ -40,6 +32,7 @@ from libcst.codemod import ( gather_files, parallel_exec_transform_with_prettyprint, ) +from libcst.helpers import filter_node_fields _DEFAULT_INDENT: str = " " @@ -54,76 +47,14 @@ def _node_repr_recursive( # noqa: C901 ) -> List[str]: if isinstance(node, CSTNode): # This is a CSTNode, we must pretty-print it. 
+ fields: Sequence["dataclasses.Field[CSTNode]"] = filter_node_fields( + node=node, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + tokens: List[str] = [node.__class__.__name__] - fields: Sequence["dataclasses.Field[object]"] = dataclasses.fields(node) - - # Hide all fields prefixed with "_" - fields = [f for f in fields if f.name[0] != "_"] - - # Filter whitespace nodes if needed - if not show_whitespace: - - def _is_whitespace(field: "dataclasses.Field[object]") -> bool: - if "whitespace" in field.name: - return True - if "leading_lines" in field.name: - return True - if "lines_after_decorators" in field.name: - return True - if isinstance(node, (IndentedBlock, Module)) and field.name in [ - "header", - "footer", - ]: - return True - if isinstance(node, IndentedBlock) and field.name == "indent": - return True - return False - - fields = [f for f in fields if not _is_whitespace(f)] - # Filter values which aren't changed from their defaults - if not show_defaults: - - def _get_default(fld: "dataclasses.Field[object]") -> object: - if fld.default_factory is not dataclasses.MISSING: - # pyre-fixme[29]: `Union[dataclasses._MISSING_TYPE, - # dataclasses._DefaultFactory[object]]` is not a function. - return fld.default_factory() - return fld.default - - fields = [ - f - for f in fields - if not deep_equals(getattr(node, f.name), _get_default(f)) - ] - # Filter out values which aren't interesting if needed - if not show_syntax: - - def _is_syntax(field: "dataclasses.Field[object]") -> bool: - if isinstance(node, Module) and field.name in [ - "encoding", - "default_indent", - "default_newline", - "has_trailing_newline", - ]: - return True - type_str = repr(field.type) - if ( - "Sentinel" in type_str - and field.name not in ["star_arg", "star", "posonly_ind"] - and "whitespace" not in field.name - ): - # This is a value that can optionally be specified, so its - # definitely syntax. - return True - - for name in ["Semicolon", "Colon", "Comma", "Dot", "AssignEqual"]: - # These are all nodes that exist for separation syntax - if name in type_str: - return True - - return False - - fields = [f for f in fields if not _is_syntax(f)] if len(fields) == 0: tokens.append("()") @@ -204,12 +135,12 @@ def dump( from the default contruction of the node while also hiding whitespace and syntax fields. - Setting ``show_default`` to ``True`` will add fields regardless if their + Setting ``show_defaults`` to ``True`` will add fields regardless if their value is different from the default value. Setting ``show_whitespace`` will add whitespace fields and setting ``show_syntax`` will add syntax fields while respecting the value of - ``show_default``. + ``show_defaults``. When all keyword args are set to true, the output of this function is indentical to the __repr__ method of the node. From 6bbc69316b72e05da58fc2d9715d7a959576d1ca Mon Sep 17 00:00:00 2001 From: zaicruvoir1rominet <105832258+zaicruvoir1rominet@users.noreply.github.com> Date: Mon, 20 May 2024 17:25:13 +0200 Subject: [PATCH 477/632] Add the ability to dump CST to .dot (graphviz) files (#1147) * Make the nodes fields filtering process - from libcst.tool - public, so that other libraries may provide their own custom representation of LibCST graphs. * Create functions to access & filter CST-node fields (with appropriate docstrings & tests), in libcst.helpers * Add new CST-node fields functions to helpers documentation. 
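Before the diff itself, a short sketch (not part of the patch) of how the new `libcst.display` helpers from #1147 and the public node-field filters from #1137 might be used together; the input string and output filename are arbitrary examples:

```
import libcst as cst
from libcst.display import dump, dump_graphviz
from libcst.helpers import filter_node_fields

module = cst.parse_module("x = 1 + 2\n")

# Compact text dump: whitespace, pure-syntax, and default-valued fields hidden.
print(dump(module))

# The same filtering logic, now public: list the "interesting" fields of a node.
assign = module.body[0].body[0]
fields = filter_node_fields(
    assign, show_defaults=False, show_syntax=False, show_whitespace=False
)
print([f.name for f in fields])

# Graphviz output; render with e.g. `dot -Tpng cst.dot -o cst.png`.
with open("cst.dot", "w") as out:
    out.write(dump_graphviz(module))
```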
--- docs/source/tutorial.ipynb | 52 ++++- libcst/display/__init__.py | 12 ++ libcst/display/graphviz.py | 187 ++++++++++++++++++ libcst/display/tests/__init__.py | 4 + libcst/display/tests/test_dump_graphviz.py | 83 ++++++++ .../tests/test_dump_text.py} | 2 +- libcst/display/text.py | 133 +++++++++++++ libcst/tool.py | 160 +++------------ 8 files changed, 493 insertions(+), 140 deletions(-) create mode 100644 libcst/display/__init__.py create mode 100644 libcst/display/graphviz.py create mode 100644 libcst/display/tests/__init__.py create mode 100644 libcst/display/tests/test_dump_graphviz.py rename libcst/{tests/test_tool.py => display/tests/test_dump_text.py} (99%) create mode 100644 libcst/display/text.py diff --git a/docs/source/tutorial.ipynb b/docs/source/tutorial.ipynb index a7c3cd6b..3e6bdac0 100644 --- a/docs/source/tutorial.ipynb +++ b/docs/source/tutorial.ipynb @@ -1,24 +1,25 @@ { "cells": [ { - "cell_type": "raw", "metadata": { "raw_mimetype": "text/restructuredtext" }, + "cell_type": "raw", "source": [ "====================\n", "Parsing and Visiting\n", "====================\n", "\n", - "LibCST provides helpers to parse source code string as concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use visitor pattern to traverse the tree. In this tutorial, we demonstrate a common three-step-workflow to build an automated refactoring (codemod) application:\n", + "LibCST provides helpers to parse source code string as concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use visitor pattern to traverse the tree. In this tutorial, we demonstrate a common four-step-workflow to build an automated refactoring (codemod) application:\n", "\n", "1. `Parse Source Code <#Parse-Source-Code>`_\n", - "2. `Build Visitor or Transformer <#Build-Visitor-or-Transformer>`_\n", - "3. `Generate Source Code <#Generate-Source-Code>`_\n", + "2. `Display The Source Code CST <#Display-Source-Code-CST>`_\n", + "3. `Build Visitor or Transformer <#Build-Visitor-or-Transformer>`_\n", + "4. `Generate Source Code <#Generate-Source-Code>`_\n", "\n", "Parse Source Code\n", "=================\n", - "LibCST provides various helpers to parse source code as concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing ` for more detail). The default :class:`~libcst.CSTNode` repr provides pretty print formatting for reading the tree easily." + "LibCST provides various helpers to parse source code as concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing ` for more detail)." ] }, { @@ -41,7 +42,42 @@ "source": [ "import libcst as cst\n", "\n", - "cst.parse_expression(\"1 + 2\")" + "source_tree = cst.parse_expression(\"1 + 2\")" + ] + }, + { + "metadata": { + "raw_mimetype": "text/restructuredtext" + }, + "cell_type": "raw", + "source": [ + "|\n", + "Display Source Code CST\n", + "=======================\n", + "The default :class:`~libcst.CSTNode` repr provides pretty print formatting for displaying the entire CST tree." + ] + }, + { + "metadata": {}, + "cell_type": "code", + "outputs": [], + "execution_count": null, + "source": "print(source_tree)" + }, + { + "metadata": {}, + "cell_type": "raw", + "source": "The entire CST tree may be overwhelming at times. 
To only focus on essential elements of the CST tree, LibCST provides the ``dump`` helper." + }, + { + "metadata": {}, + "cell_type": "code", + "outputs": [], + "execution_count": null, + "source": [ + "from libcst.display import dump\n", + "\n", + "print(dump(source_tree))" ] }, { @@ -50,6 +86,8 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ + " \n", + "|\n", "Example: add typing annotation from pyi stub file to Python source\n", "------------------------------------------------------------------\n", "Python `typing annotation `_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easliy using LibCST. The first step is to parse the pyi stub and source files as trees." @@ -92,6 +130,7 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ + "|\n", "Build Visitor or Transformer\n", "============================\n", "For traversing and modifying the tree, LibCST provides Visitor and Transformer classes similar to the `ast module `_. To implement a visitor (read only) or transformer (read/write), simply implement a subclass of :class:`~libcst.CSTVisitor` or :class:`~libcst.CSTTransformer` (see :doc:`Visitors ` for more detail).\n", @@ -184,6 +223,7 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ + "|\n", "Generate Source Code\n", "====================\n", "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt `_ to reformate the code to keep a consistent coding style." diff --git a/libcst/display/__init__.py b/libcst/display/__init__.py new file mode 100644 index 00000000..49365a58 --- /dev/null +++ b/libcst/display/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from libcst.display.graphviz import dump_graphviz +from libcst.display.text import dump + +__all__ = [ + "dump", + "dump_graphviz", +] diff --git a/libcst/display/graphviz.py b/libcst/display/graphviz.py new file mode 100644 index 00000000..e6b5b748 --- /dev/null +++ b/libcst/display/graphviz.py @@ -0,0 +1,187 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from __future__ import annotations + +import textwrap +from collections.abc import Sequence + +from libcst import CSTNode +from libcst.helpers import filter_node_fields + + +_syntax_style = ', color="#777777", fillcolor="#eeeeee"' +_value_style = ', color="#3e99ed", fillcolor="#b8d9f8"' + +node_style: dict[str, str] = { + "__default__": "", + "EmptyLine": _syntax_style, + "IndentedBlock": _syntax_style, + "SimpleStatementLine": _syntax_style, + "SimpleWhitespace": _syntax_style, + "TrailingWhitespace": _syntax_style, + "Newline": _syntax_style, + "Comma": _syntax_style, + "LeftParen": _syntax_style, + "RightParen": _syntax_style, + "LeftSquareBracket": _syntax_style, + "RightSquareBracket": _syntax_style, + "LeftCurlyBrace": _syntax_style, + "RightCurlyBrace": _syntax_style, + "BaseSmallStatement": _syntax_style, + "BaseCompoundStatement": _syntax_style, + "SimpleStatementSuite": _syntax_style, + "Colon": _syntax_style, + "Dot": _syntax_style, + "Semicolon": _syntax_style, + "ParenthesizedWhitespace": _syntax_style, + "BaseParenthesizableWhitespace": _syntax_style, + "Comment": _syntax_style, + "Name": _value_style, + "Integer": _value_style, + "Float": _value_style, + "Imaginary": _value_style, + "SimpleString": _value_style, + "FormattedStringText": _value_style, +} +"""Graphviz style for specific CST nodes""" + + +def _create_node_graphviz(node: CSTNode) -> str: + """Creates the graphviz representation of a CST node.""" + node_name = node.__class__.__qualname__ + + if node_name in node_style: + style = node_style[node_name] + else: + style = node_style["__default__"] + + # pyre-ignore[16]: the existence of node.value is checked before usage + if hasattr(node, "value") and isinstance(node.value, str): + line_break = r"\n" + quote = '"' + escaped_quote = r"\"" + value = f"{line_break}<{node.value.replace(quote, escaped_quote)}>" + style = style + ', shape="box"' + else: + value = "" + + return f'{id(node)} [label="{node_name}{value}"{style}]' + + +def _node_repr_recursive( + node: object, + *, + show_defaults: bool, + show_syntax: bool, + show_whitespace: bool, +) -> list[str]: + """Creates the graphviz representation of a CST node, + and of its child nodes.""" + if not isinstance(node, CSTNode): + return [] + + fields = filter_node_fields( + node, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + + graphviz_lines: list[str] = [_create_node_graphviz(node)] + + for field in fields: + value = getattr(node, field.name) + if isinstance(value, CSTNode): + # Display a single node + graphviz_lines.append(f'{id(node)} -> {id(value)} [label="{field.name}"]') + graphviz_lines.extend( + _node_repr_recursive( + value, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + ) + continue + + if isinstance(value, Sequence): + # Display a sequence of nodes + for index, child in enumerate(value): + if isinstance(child, CSTNode): + graphviz_lines.append( + rf'{id(node)} -> {id(child)} [label="{field.name}[{index}]"]' + ) + graphviz_lines.extend( + _node_repr_recursive( + child, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + ) + + return graphviz_lines + + +def dump_graphviz( + node: object, + *, + show_defaults: bool = False, + show_syntax: bool = False, + show_whitespace: bool = False, +) -> str: + """ + Returns a string representation (in graphviz .dot style) of a CST node, + and its child nodes. 
+ + Setting ``show_defaults`` to ``True`` will add fields regardless if their + value is different from the default value. + + Setting ``show_whitespace`` will add whitespace fields and setting + ``show_syntax`` will add syntax fields while respecting the value of + ``show_defaults``. + """ + + graphviz_settings = textwrap.dedent( + r""" + layout=dot; + rankdir=TB; + splines=line; + ranksep=0.5; + nodesep=1.0; + dpi=300; + bgcolor=transparent; + node [ + style=filled, + color="#fb8d3f", + fontcolor="#4b4f54", + fillcolor="#fdd2b3", + fontname="Source Code Pro Semibold", + penwidth="2", + group=main, + ]; + edge [ + color="#999999", + fontcolor="#4b4f54", + fontname="Source Code Pro Semibold", + fontsize=12, + penwidth=2, + ]; + """[ + 1: + ] + ) + + return "\n".join( + ["digraph {", graphviz_settings] + + _node_repr_recursive( + node, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + + ["}"] + ) diff --git a/libcst/display/tests/__init__.py b/libcst/display/tests/__init__.py new file mode 100644 index 00000000..7bec24cb --- /dev/null +++ b/libcst/display/tests/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. diff --git a/libcst/display/tests/test_dump_graphviz.py b/libcst/display/tests/test_dump_graphviz.py new file mode 100644 index 00000000..17ce231f --- /dev/null +++ b/libcst/display/tests/test_dump_graphviz.py @@ -0,0 +1,83 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from __future__ import annotations + +from textwrap import dedent +from typing import TYPE_CHECKING + +from libcst import parse_module +from libcst.display import dump_graphviz +from libcst.testing.utils import UnitTest + +if TYPE_CHECKING: + from libcst import Module + + +class CSTDumpGraphvizTest(UnitTest): + """Check dump_graphviz contains CST nodes.""" + + source_code: str = dedent( + r""" + def foo(a: str) -> None: + pass ; + pass + return + """[ + 1: + ] + ) + cst: Module + + @classmethod + def setUpClass(cls) -> None: + cls.cst = parse_module(cls.source_code) + + def _assert_node(self, node_name: str, graphviz_str: str) -> None: + self.assertIn( + node_name, graphviz_str, f"No node {node_name} found in graphviz_dump" + ) + + def _check_essential_nodes_in_tree(self, graphviz_str: str) -> None: + # Check CST nodes are present in graphviz string + self._assert_node("Module", graphviz_str) + self._assert_node("FunctionDef", graphviz_str) + self._assert_node("Name", graphviz_str) + self._assert_node("Parameters", graphviz_str) + self._assert_node("Param", graphviz_str) + self._assert_node("Annotation", graphviz_str) + self._assert_node("IndentedBlock", graphviz_str) + self._assert_node("SimpleStatementLine", graphviz_str) + self._assert_node("Pass", graphviz_str) + self._assert_node("Return", graphviz_str) + + # Check CST values are present in graphviz string + self._assert_node("", graphviz_str) + self._assert_node("
", graphviz_str) + self._assert_node("", graphviz_str) + self._assert_node("", graphviz_str) + + def test_essential_tree(self) -> None: + """Check essential nodes are present in the CST graphviz dump.""" + graphviz_str = dump_graphviz(self.cst) + self._check_essential_nodes_in_tree(graphviz_str) + + def test_full_tree(self) -> None: + """Check all nodes are present in the CST graphviz dump.""" + graphviz_str = dump_graphviz( + self.cst, + show_whitespace=True, + show_defaults=True, + show_syntax=True, + ) + self._check_essential_nodes_in_tree(graphviz_str) + + self._assert_node("Semicolon", graphviz_str) + self._assert_node("SimpleWhitespace", graphviz_str) + self._assert_node("Newline", graphviz_str) + self._assert_node("TrailingWhitespace", graphviz_str) + + self._assert_node("<>", graphviz_str) + self._assert_node("< >", graphviz_str) diff --git a/libcst/tests/test_tool.py b/libcst/display/tests/test_dump_text.py similarity index 99% rename from libcst/tests/test_tool.py rename to libcst/display/tests/test_dump_text.py index 2042ef9b..bbfc6eb0 100644 --- a/libcst/tests/test_tool.py +++ b/libcst/display/tests/test_dump_text.py @@ -10,7 +10,7 @@ from libcst.testing.utils import UnitTest from libcst.tool import dump -class PrettyPrintNodesTest(UnitTest): +class CSTDumpTextTest(UnitTest): def test_full_tree(self) -> None: module = r""" Module( diff --git a/libcst/display/text.py b/libcst/display/text.py new file mode 100644 index 00000000..3c6dc288 --- /dev/null +++ b/libcst/display/text.py @@ -0,0 +1,133 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from __future__ import annotations + +import dataclasses +from typing import List, Sequence + +from libcst import CSTNode +from libcst.helpers import filter_node_fields + +_DEFAULT_INDENT: str = " " + + +def _node_repr_recursive( # noqa: C901 + node: object, + *, + indent: str = _DEFAULT_INDENT, + show_defaults: bool = False, + show_syntax: bool = False, + show_whitespace: bool = False, +) -> List[str]: + if isinstance(node, CSTNode): + # This is a CSTNode, we must pretty-print it. 
+ fields: Sequence[dataclasses.Field[CSTNode]] = filter_node_fields( + node=node, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + + tokens: List[str] = [node.__class__.__name__] + + if len(fields) == 0: + tokens.append("()") + else: + tokens.append("(\n") + + for field in fields: + child_tokens: List[str] = [field.name, "="] + value = getattr(node, field.name) + + if isinstance(value, (str, bytes)) or not isinstance(value, Sequence): + # Render out the node contents + child_tokens.extend( + _node_repr_recursive( + value, + indent=indent, + show_whitespace=show_whitespace, + show_defaults=show_defaults, + show_syntax=show_syntax, + ) + ) + elif isinstance(value, Sequence): + # Render out a list of individual nodes + if len(value) > 0: + child_tokens.append("[\n") + list_tokens: List[str] = [] + + last_value = len(value) - 1 + for j, v in enumerate(value): + list_tokens.extend( + _node_repr_recursive( + v, + indent=indent, + show_whitespace=show_whitespace, + show_defaults=show_defaults, + show_syntax=show_syntax, + ) + ) + if j != last_value: + list_tokens.append(",\n") + else: + list_tokens.append(",") + + split_by_line = "".join(list_tokens).split("\n") + child_tokens.append( + "\n".join(f"{indent}{t}" for t in split_by_line) + ) + + child_tokens.append("\n]") + else: + child_tokens.append("[]") + else: + raise Exception("Logic error!") + + # Handle indentation and trailing comma. + split_by_line = "".join(child_tokens).split("\n") + tokens.append("\n".join(f"{indent}{t}" for t in split_by_line)) + tokens.append(",\n") + + tokens.append(")") + + return tokens + else: + # This is a python value, just return the repr + return [repr(node)] + + +def dump( + node: CSTNode, + *, + indent: str = _DEFAULT_INDENT, + show_defaults: bool = False, + show_syntax: bool = False, + show_whitespace: bool = False, +) -> str: + """ + Returns a string representation of the node that contains minimal differences + from the default contruction of the node while also hiding whitespace and + syntax fields. + + Setting ``show_defaults`` to ``True`` will add fields regardless if their + value is different from the default value. + + Setting ``show_whitespace`` will add whitespace fields and setting + ``show_syntax`` will add syntax fields while respecting the value of + ``show_defaults``. + + When all keyword args are set to true, the output of this function is + indentical to the __repr__ method of the node. 
+ """ + return "".join( + _node_repr_recursive( + node, + indent=indent, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + ) diff --git a/libcst/tool.py b/libcst/tool.py index 85a977be..3c00ba8d 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -9,7 +9,6 @@ # python -m libcst.tool print python_file.py import argparse -import dataclasses import importlib import inspect import os @@ -18,11 +17,11 @@ import shutil import sys import textwrap from abc import ABC, abstractmethod -from typing import Any, Callable, Dict, List, Sequence, Tuple, Type +from typing import Any, Callable, Dict, List, Tuple, Type import yaml -from libcst import CSTNode, LIBCST_VERSION, parse_module, PartialParserConfig +from libcst import LIBCST_VERSION, parse_module, PartialParserConfig from libcst._parser.parso.utils import parse_version_string from libcst.codemod import ( CodemodCommand, @@ -32,128 +31,8 @@ from libcst.codemod import ( gather_files, parallel_exec_transform_with_prettyprint, ) -from libcst.helpers import filter_node_fields - -_DEFAULT_INDENT: str = " " - - -def _node_repr_recursive( # noqa: C901 - node: object, - *, - indent: str = _DEFAULT_INDENT, - show_defaults: bool = False, - show_syntax: bool = False, - show_whitespace: bool = False, -) -> List[str]: - if isinstance(node, CSTNode): - # This is a CSTNode, we must pretty-print it. - fields: Sequence["dataclasses.Field[CSTNode]"] = filter_node_fields( - node=node, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - - tokens: List[str] = [node.__class__.__name__] - - if len(fields) == 0: - tokens.append("()") - else: - tokens.append("(\n") - - for field in fields: - child_tokens: List[str] = [field.name, "="] - value = getattr(node, field.name) - - if isinstance(value, (str, bytes)) or not isinstance(value, Sequence): - # Render out the node contents - child_tokens.extend( - _node_repr_recursive( - value, - indent=indent, - show_whitespace=show_whitespace, - show_defaults=show_defaults, - show_syntax=show_syntax, - ) - ) - elif isinstance(value, Sequence): - # Render out a list of individual nodes - if len(value) > 0: - child_tokens.append("[\n") - list_tokens: List[str] = [] - - last_value = len(value) - 1 - for j, v in enumerate(value): - list_tokens.extend( - _node_repr_recursive( - v, - indent=indent, - show_whitespace=show_whitespace, - show_defaults=show_defaults, - show_syntax=show_syntax, - ) - ) - if j != last_value: - list_tokens.append(",\n") - else: - list_tokens.append(",") - - split_by_line = "".join(list_tokens).split("\n") - child_tokens.append( - "\n".join(f"{indent}{t}" for t in split_by_line) - ) - - child_tokens.append("\n]") - else: - child_tokens.append("[]") - else: - raise Exception("Logic error!") - - # Handle indentation and trailing comma. - split_by_line = "".join(child_tokens).split("\n") - tokens.append("\n".join(f"{indent}{t}" for t in split_by_line)) - tokens.append(",\n") - - tokens.append(")") - - return tokens - else: - # This is a python value, just return the repr - return [repr(node)] - - -def dump( - node: CSTNode, - *, - indent: str = _DEFAULT_INDENT, - show_defaults: bool = False, - show_syntax: bool = False, - show_whitespace: bool = False, -) -> str: - """ - Returns a string representation of the node that contains minimal differences - from the default contruction of the node while also hiding whitespace and - syntax fields. 
- - Setting ``show_defaults`` to ``True`` will add fields regardless if their - value is different from the default value. - - Setting ``show_whitespace`` will add whitespace fields and setting - ``show_syntax`` will add syntax fields while respecting the value of - ``show_defaults``. - - When all keyword args are set to true, the output of this function is - indentical to the __repr__ method of the node. - """ - return "".join( - _node_repr_recursive( - node, - indent=indent, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - ) +from libcst.display import dump, dump_graphviz +from libcst.display.text import _DEFAULT_INDENT def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: @@ -183,6 +62,11 @@ def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: action="store_true", help="Show values that exist only for syntax, like commas or semicolons", ) + parser.add_argument( + "--graphviz", + action="store_true", + help="Displays the graph in .dot format, compatible with Graphviz", + ) parser.add_argument( "--indent-string", default=_DEFAULT_INDENT, @@ -217,15 +101,25 @@ def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: else PartialParserConfig() ), ) - print( - dump( - tree, - indent=args.indent_string, - show_defaults=args.show_defaults, - show_syntax=args.show_syntax, - show_whitespace=args.show_whitespace, + if not args.graphviz: + print( + dump( + tree, + indent=args.indent_string, + show_defaults=args.show_defaults, + show_syntax=args.show_syntax, + show_whitespace=args.show_whitespace, + ) + ) + else: + print( + dump_graphviz( + tree, + show_defaults=args.show_defaults, + show_syntax=args.show_syntax, + show_whitespace=args.show_whitespace, + ) ) - ) return 0 From 71b0a1288b4704987bab6f53bf40940cb4c857e2 Mon Sep 17 00:00:00 2001 From: martin <48778384+thereversiblewheel@users.noreply.github.com> Date: Mon, 20 May 2024 11:26:38 -0400 Subject: [PATCH 478/632] Implement Type Defaults for Type Parameters (PEP 696) (#1141) Co-authored-by: thereversiblewheel --- libcst/_nodes/statement.py | 47 +- libcst/_nodes/tests/test_type_alias.py | 124 + libcst/_typed_visitor.py | 14940 +++---- libcst/matchers/__init__.py | 32562 ++++++++-------- libcst/matchers/_return_types.py | 750 +- libcst/tests/test_roundtrip.py | 1 + native/libcst/src/nodes/statement.rs | 27 +- native/libcst/src/parser/grammar.rs | 54 +- .../libcst/tests/fixtures/type_parameters.py | 14 +- 9 files changed, 24422 insertions(+), 24097 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index bf9e8ecc..6cad7684 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -7,7 +7,7 @@ import inspect import re from abc import ABC, abstractmethod from dataclasses import dataclass, field -from typing import Optional, Pattern, Sequence, Union +from typing import Literal, Optional, Pattern, Sequence, Union from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel @@ -3653,8 +3653,34 @@ class TypeParam(CSTNode): #: with a comma only if a comma is required. comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT + #: The equal sign used to denote assignment if there is a default. + equal: Union[AssignEqual, MaybeSentinel] = MaybeSentinel.DEFAULT + + #: The star used to denote a variadic default + star: Literal["", "*"] = "" + + #: The whitespace between the star and the type. 
+ whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field("") + + #: Any optional default value, used when the argument is not supplied. + default: Optional[BaseExpression] = None + def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: self.param._codegen(state) + + equal = self.equal + if equal is MaybeSentinel.DEFAULT and self.default is not None: + state.add_token(" = ") + elif isinstance(equal, AssignEqual): + equal._codegen(state) + + state.add_token(self.star) + self.whitespace_after_star._codegen(state) + + default = self.default + if default is not None: + default._codegen(state) + comma = self.comma if isinstance(comma, MaybeSentinel): if default_comma: @@ -3663,10 +3689,27 @@ class TypeParam(CSTNode): comma._codegen(state) def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeParam": - return TypeParam( + ret = TypeParam( param=visit_required(self, "param", self.param, visitor), + equal=visit_sentinel(self, "equal", self.equal, visitor), + star=self.star, + whitespace_after_star=visit_required( + self, "whitespace_after_star", self.whitespace_after_star, visitor + ), + default=visit_optional(self, "default", self.default, visitor), comma=visit_sentinel(self, "comma", self.comma, visitor), ) + return ret + + def _validate(self) -> None: + if self.default is None and isinstance(self.equal, AssignEqual): + raise CSTValidationError( + "Must have a default when specifying an AssignEqual." + ) + if self.star and not (self.default or isinstance(self.equal, AssignEqual)): + raise CSTValidationError("Star can only be present if a default") + if isinstance(self.star, str) and self.star not in ("", "*"): + raise CSTValidationError("Must specify either '' or '*' for star.") @add_slots diff --git a/libcst/_nodes/tests/test_type_alias.py b/libcst/_nodes/tests/test_type_alias.py index 11fd1123..aa26103b 100644 --- a/libcst/_nodes/tests/test_type_alias.py +++ b/libcst/_nodes/tests/test_type_alias.py @@ -56,6 +56,79 @@ class TypeAliasCreationTest(CSTNodeTest): "code": "type foo[T: str, *Ts, **KW] = bar | baz", "expected_position": CodeRange((1, 0), (1, 39)), }, + { + "node": cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.TypeVar(cst.Name("T")), default=cst.Name("str") + ), + ] + ), + value=cst.Name("bar"), + ), + "code": "type foo[T = str] = bar", + "expected_position": CodeRange((1, 0), (1, 23)), + }, + { + "node": cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.ParamSpec(cst.Name("P")), + default=cst.List( + elements=[ + cst.Element(cst.Name("int")), + cst.Element(cst.Name("str")), + ] + ), + ), + ] + ), + value=cst.Name("bar"), + ), + "code": "type foo[**P = [int, str]] = bar", + "expected_position": CodeRange((1, 0), (1, 32)), + }, + { + "node": cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.TypeVarTuple(cst.Name("T")), + equal=cst.AssignEqual(), + default=cst.Name("default"), + star="*", + ), + ] + ), + value=cst.Name("bar"), + ), + "code": "type foo[*T = *default] = bar", + "expected_position": CodeRange((1, 0), (1, 29)), + }, + { + "node": cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.TypeVarTuple(cst.Name("T")), + equal=cst.AssignEqual(), + default=cst.Name("default"), + star="*", + whitespace_after_star=cst.SimpleWhitespace(" "), + ), + ] + ), + value=cst.Name("bar"), + ), + "code": "type foo[*T = * default] = bar", + 
"expected_position": CodeRange((1, 0), (1, 31)), + }, ) ) def test_valid(self, **kwargs: Any) -> None: @@ -125,6 +198,57 @@ class TypeAliasParserTest(CSTNodeTest): "code": "type foo [T:str,** KW , ] = bar ; \n", "parser": parse_statement, }, + { + "node": cst.SimpleStatementLine( + [ + cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.TypeVarTuple(cst.Name("P")), + star="*", + equal=cst.AssignEqual(), + default=cst.Name("default"), + ), + ] + ), + value=cst.Name("bar"), + whitespace_after_name=cst.SimpleWhitespace(" "), + whitespace_after_type_parameters=cst.SimpleWhitespace(" "), + ) + ] + ), + "code": "type foo [*P = *default] = bar\n", + "parser": parse_statement, + }, + { + "node": cst.SimpleStatementLine( + [ + cst.TypeAlias( + cst.Name("foo"), + type_parameters=cst.TypeParameters( + [ + cst.TypeParam( + cst.TypeVarTuple(cst.Name("P")), + star="*", + whitespace_after_star=cst.SimpleWhitespace( + " " + ), + equal=cst.AssignEqual(), + default=cst.Name("default"), + ), + ] + ), + value=cst.Name("bar"), + whitespace_after_name=cst.SimpleWhitespace(" "), + whitespace_after_type_parameters=cst.SimpleWhitespace(" "), + ) + ] + ), + "code": "type foo [*P = * default] = bar\n", + "parser": parse_statement, + }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 7e1d1c42..742d9f10 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -1,7454 +1,7486 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - - -# This file was generated by libcst.codegen.gen_matcher_classes -from typing import Optional, TYPE_CHECKING, Union - -from libcst._flatten_sentinel import FlattenSentinel -from libcst._maybe_sentinel import MaybeSentinel -from libcst._removal_sentinel import RemovalSentinel -from libcst._typed_visitor_base import mark_no_op - - -if TYPE_CHECKING: - from libcst._nodes.expression import ( # noqa: F401 - Annotation, - Arg, - Asynchronous, - Attribute, - Await, - BaseDictElement, - BaseElement, - BaseExpression, - BaseFormattedStringContent, - BaseSlice, - BinaryOperation, - BooleanOperation, - Call, - Comparison, - ComparisonTarget, - CompFor, - CompIf, - ConcatenatedString, - Dict, - DictComp, - DictElement, - Element, - Ellipsis, - Float, - FormattedString, - FormattedStringExpression, - FormattedStringText, - From, - GeneratorExp, - IfExp, - Imaginary, - Index, - Integer, - Lambda, - LeftCurlyBrace, - LeftParen, - LeftSquareBracket, - List, - ListComp, - Name, - NamedExpr, - Param, - Parameters, - ParamSlash, - ParamStar, - RightCurlyBrace, - RightParen, - RightSquareBracket, - Set, - SetComp, - SimpleString, - Slice, - StarredDictElement, - StarredElement, - Subscript, - SubscriptElement, - Tuple, - UnaryOperation, - Yield, - ) - from libcst._nodes.module import Module # noqa: F401 - from libcst._nodes.op import ( # noqa: F401 - Add, - AddAssign, - And, - AssignEqual, - BaseAugOp, - BaseBinaryOp, - BaseBooleanOp, - BaseCompOp, - BaseUnaryOp, - BitAnd, - BitAndAssign, - BitInvert, - BitOr, - BitOrAssign, - BitXor, - BitXorAssign, - Colon, - Comma, - Divide, - DivideAssign, - Dot, - Equal, - FloorDivide, - FloorDivideAssign, - GreaterThan, - GreaterThanEqual, - ImportStar, - In, - Is, - IsNot, - LeftShift, - LeftShiftAssign, - LessThan, - LessThanEqual, - MatrixMultiply, - MatrixMultiplyAssign, - Minus, - Modulo, - ModuloAssign, 
- Multiply, - MultiplyAssign, - Not, - NotEqual, - NotIn, - Or, - Plus, - Power, - PowerAssign, - RightShift, - RightShiftAssign, - Semicolon, - Subtract, - SubtractAssign, - ) - from libcst._nodes.statement import ( # noqa: F401 - AnnAssign, - AsName, - Assert, - Assign, - AssignTarget, - AugAssign, - BaseSmallStatement, - BaseStatement, - BaseSuite, - Break, - ClassDef, - Continue, - Decorator, - Del, - Else, - ExceptHandler, - ExceptStarHandler, - Expr, - Finally, - For, - FunctionDef, - Global, - If, - Import, - ImportAlias, - ImportFrom, - IndentedBlock, - Match, - MatchAs, - MatchCase, - MatchClass, - MatchKeywordElement, - MatchList, - MatchMapping, - MatchMappingElement, - MatchOr, - MatchOrElement, - MatchPattern, - MatchSequence, - MatchSequenceElement, - MatchSingleton, - MatchStar, - MatchTuple, - MatchValue, - NameItem, - Nonlocal, - ParamSpec, - Pass, - Raise, - Return, - SimpleStatementLine, - SimpleStatementSuite, - Try, - TryStar, - TypeAlias, - TypeParam, - TypeParameters, - TypeVar, - TypeVarTuple, - While, - With, - WithItem, - ) - from libcst._nodes.whitespace import ( # noqa: F401 - BaseParenthesizableWhitespace, - Comment, - EmptyLine, - Newline, - ParenthesizedWhitespace, - SimpleWhitespace, - TrailingWhitespace, - ) - - -class CSTTypedBaseFunctions: - @mark_no_op - def visit_Add(self, node: "Add") -> Optional[bool]: - pass - - @mark_no_op - def visit_Add_whitespace_before(self, node: "Add") -> None: - pass - - @mark_no_op - def leave_Add_whitespace_before(self, node: "Add") -> None: - pass - - @mark_no_op - def visit_Add_whitespace_after(self, node: "Add") -> None: - pass - - @mark_no_op - def leave_Add_whitespace_after(self, node: "Add") -> None: - pass - - @mark_no_op - def visit_AddAssign(self, node: "AddAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_AddAssign_whitespace_before(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def leave_AddAssign_whitespace_before(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def visit_AddAssign_whitespace_after(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def leave_AddAssign_whitespace_after(self, node: "AddAssign") -> None: - pass - - @mark_no_op - def visit_And(self, node: "And") -> Optional[bool]: - pass - - @mark_no_op - def visit_And_whitespace_before(self, node: "And") -> None: - pass - - @mark_no_op - def leave_And_whitespace_before(self, node: "And") -> None: - pass - - @mark_no_op - def visit_And_whitespace_after(self, node: "And") -> None: - pass - - @mark_no_op - def leave_And_whitespace_after(self, node: "And") -> None: - pass - - @mark_no_op - def visit_AnnAssign(self, node: "AnnAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_AnnAssign_target(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_target(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_annotation(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_annotation(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_value(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_value(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_equal(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_equal(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def visit_AnnAssign_semicolon(self, node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_AnnAssign_semicolon(self, node: "AnnAssign") -> None: - 
pass - - @mark_no_op - def visit_Annotation(self, node: "Annotation") -> Optional[bool]: - pass - - @mark_no_op - def visit_Annotation_annotation(self, node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Annotation_annotation(self, node: "Annotation") -> None: - pass - - @mark_no_op - def visit_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def visit_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: - pass - - @mark_no_op - def visit_Arg(self, node: "Arg") -> Optional[bool]: - pass - - @mark_no_op - def visit_Arg_value(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_value(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_keyword(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_keyword(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_equal(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_equal(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_comma(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_comma(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_whitespace_after_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_whitespace_after_star(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_Arg_whitespace_after_arg(self, node: "Arg") -> None: - pass - - @mark_no_op - def leave_Arg_whitespace_after_arg(self, node: "Arg") -> None: - pass - - @mark_no_op - def visit_AsName(self, node: "AsName") -> Optional[bool]: - pass - - @mark_no_op - def visit_AsName_name(self, node: "AsName") -> None: - pass - - @mark_no_op - def leave_AsName_name(self, node: "AsName") -> None: - pass - - @mark_no_op - def visit_AsName_whitespace_before_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def leave_AsName_whitespace_before_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def visit_AsName_whitespace_after_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def leave_AsName_whitespace_after_as(self, node: "AsName") -> None: - pass - - @mark_no_op - def visit_Assert(self, node: "Assert") -> Optional[bool]: - pass - - @mark_no_op - def visit_Assert_test(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_test(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_msg(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_msg(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_comma(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_comma(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_whitespace_after_assert(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_whitespace_after_assert(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assert_semicolon(self, node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assert_semicolon(self, node: "Assert") -> None: - pass - - @mark_no_op - def visit_Assign(self, node: "Assign") -> Optional[bool]: - pass - - @mark_no_op - def visit_Assign_targets(self, node: "Assign") -> 
None: - pass - - @mark_no_op - def leave_Assign_targets(self, node: "Assign") -> None: - pass - - @mark_no_op - def visit_Assign_value(self, node: "Assign") -> None: - pass - - @mark_no_op - def leave_Assign_value(self, node: "Assign") -> None: - pass - - @mark_no_op - def visit_Assign_semicolon(self, node: "Assign") -> None: - pass - - @mark_no_op - def leave_Assign_semicolon(self, node: "Assign") -> None: - pass - - @mark_no_op - def visit_AssignEqual(self, node: "AssignEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def leave_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def visit_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def leave_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: - pass - - @mark_no_op - def visit_AssignTarget(self, node: "AssignTarget") -> Optional[bool]: - pass - - @mark_no_op - def visit_AssignTarget_target(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_AssignTarget_target(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def visit_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def visit_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: - pass - - @mark_no_op - def visit_Asynchronous(self, node: "Asynchronous") -> Optional[bool]: - pass - - @mark_no_op - def visit_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: - pass - - @mark_no_op - def leave_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: - pass - - @mark_no_op - def visit_Attribute(self, node: "Attribute") -> Optional[bool]: - pass - - @mark_no_op - def visit_Attribute_value(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_value(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_attr(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_attr(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_dot(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_dot(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_lpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_lpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_Attribute_rpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def leave_Attribute_rpar(self, node: "Attribute") -> None: - pass - - @mark_no_op - def visit_AugAssign(self, node: "AugAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_AugAssign_target(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_target(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_AugAssign_operator(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_operator(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_AugAssign_value(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_AugAssign_value(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_AugAssign_semicolon(self, node: "AugAssign") -> None: - pass - - 
@mark_no_op - def leave_AugAssign_semicolon(self, node: "AugAssign") -> None: - pass - - @mark_no_op - def visit_Await(self, node: "Await") -> Optional[bool]: - pass - - @mark_no_op - def visit_Await_expression(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_expression(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_Await_lpar(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_lpar(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_Await_rpar(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_rpar(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_Await_whitespace_after_await(self, node: "Await") -> None: - pass - - @mark_no_op - def leave_Await_whitespace_after_await(self, node: "Await") -> None: - pass - - @mark_no_op - def visit_BinaryOperation(self, node: "BinaryOperation") -> Optional[bool]: - pass - - @mark_no_op - def visit_BinaryOperation_left(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_left(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_operator(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_operator(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_right(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_right(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: - pass - - @mark_no_op - def visit_BitAnd(self, node: "BitAnd") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitAnd_whitespace_before(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def leave_BitAnd_whitespace_before(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def visit_BitAnd_whitespace_after(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def leave_BitAnd_whitespace_after(self, node: "BitAnd") -> None: - pass - - @mark_no_op - def visit_BitAndAssign(self, node: "BitAndAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def leave_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def visit_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def leave_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: - pass - - @mark_no_op - def visit_BitInvert(self, node: "BitInvert") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitInvert_whitespace_after(self, node: "BitInvert") -> None: - pass - - @mark_no_op - def leave_BitInvert_whitespace_after(self, node: "BitInvert") -> None: - pass - - @mark_no_op - def visit_BitOr(self, node: "BitOr") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitOr_whitespace_before(self, node: "BitOr") -> None: - pass - - @mark_no_op - def leave_BitOr_whitespace_before(self, node: "BitOr") -> None: - pass - - @mark_no_op - def visit_BitOr_whitespace_after(self, node: "BitOr") -> None: - pass - - @mark_no_op - def leave_BitOr_whitespace_after(self, node: "BitOr") -> None: - 
pass - - @mark_no_op - def visit_BitOrAssign(self, node: "BitOrAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def leave_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def visit_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def leave_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: - pass - - @mark_no_op - def visit_BitXor(self, node: "BitXor") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitXor_whitespace_before(self, node: "BitXor") -> None: - pass - - @mark_no_op - def leave_BitXor_whitespace_before(self, node: "BitXor") -> None: - pass - - @mark_no_op - def visit_BitXor_whitespace_after(self, node: "BitXor") -> None: - pass - - @mark_no_op - def leave_BitXor_whitespace_after(self, node: "BitXor") -> None: - pass - - @mark_no_op - def visit_BitXorAssign(self, node: "BitXorAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def leave_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def visit_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def leave_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: - pass - - @mark_no_op - def visit_BooleanOperation(self, node: "BooleanOperation") -> Optional[bool]: - pass - - @mark_no_op - def visit_BooleanOperation_left(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_left(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_operator(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_operator(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_right(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_right(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: - pass - - @mark_no_op - def visit_Break(self, node: "Break") -> Optional[bool]: - pass - - @mark_no_op - def visit_Break_semicolon(self, node: "Break") -> None: - pass - - @mark_no_op - def leave_Break_semicolon(self, node: "Break") -> None: - pass - - @mark_no_op - def visit_Call(self, node: "Call") -> Optional[bool]: - pass - - @mark_no_op - def visit_Call_func(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_func(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_args(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_args(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_lpar(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_lpar(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_rpar(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_rpar(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_whitespace_after_func(self, node: "Call") -> None: - pass - - 
@mark_no_op - def leave_Call_whitespace_after_func(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_Call_whitespace_before_args(self, node: "Call") -> None: - pass - - @mark_no_op - def leave_Call_whitespace_before_args(self, node: "Call") -> None: - pass - - @mark_no_op - def visit_ClassDef(self, node: "ClassDef") -> Optional[bool]: - pass - - @mark_no_op - def visit_ClassDef_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_body(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_body(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_bases(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_bases(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_keywords(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_keywords(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_lpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_lpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_rpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_rpar(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_leading_lines(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_leading_lines(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_Colon(self, node: "Colon") -> Optional[bool]: - pass - - @mark_no_op - def visit_Colon_whitespace_before(self, node: "Colon") -> None: - pass - - @mark_no_op - def leave_Colon_whitespace_before(self, node: "Colon") -> None: - pass - - @mark_no_op - def visit_Colon_whitespace_after(self, node: "Colon") -> None: - pass - - @mark_no_op - def leave_Colon_whitespace_after(self, node: "Colon") -> None: - pass - - @mark_no_op - def visit_Comma(self, node: "Comma") -> Optional[bool]: - pass - - @mark_no_op - def visit_Comma_whitespace_before(self, node: "Comma") -> None: - pass - - @mark_no_op - def 
leave_Comma_whitespace_before(self, node: "Comma") -> None: - pass - - @mark_no_op - def visit_Comma_whitespace_after(self, node: "Comma") -> None: - pass - - @mark_no_op - def leave_Comma_whitespace_after(self, node: "Comma") -> None: - pass - - @mark_no_op - def visit_Comment(self, node: "Comment") -> Optional[bool]: - pass - - @mark_no_op - def visit_Comment_value(self, node: "Comment") -> None: - pass - - @mark_no_op - def leave_Comment_value(self, node: "Comment") -> None: - pass - - @mark_no_op - def visit_CompFor(self, node: "CompFor") -> Optional[bool]: - pass - - @mark_no_op - def visit_CompFor_target(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_target(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_iter(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_iter(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_ifs(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_ifs(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_inner_for_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_inner_for_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_asynchronous(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_asynchronous(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_before(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_before(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_after_for(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_after_for(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_before_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_before_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompFor_whitespace_after_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompFor_whitespace_after_in(self, node: "CompFor") -> None: - pass - - @mark_no_op - def visit_CompIf(self, node: "CompIf") -> Optional[bool]: - pass - - @mark_no_op - def visit_CompIf_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def leave_CompIf_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def visit_CompIf_whitespace_before(self, node: "CompIf") -> None: - pass - - @mark_no_op - def leave_CompIf_whitespace_before(self, node: "CompIf") -> None: - pass - - @mark_no_op - def visit_CompIf_whitespace_before_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def leave_CompIf_whitespace_before_test(self, node: "CompIf") -> None: - pass - - @mark_no_op - def visit_Comparison(self, node: "Comparison") -> Optional[bool]: - pass - - @mark_no_op - def visit_Comparison_left(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_left(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_Comparison_comparisons(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_comparisons(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_Comparison_lpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_lpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def visit_Comparison_rpar(self, node: "Comparison") -> None: - pass - - @mark_no_op - def leave_Comparison_rpar(self, node: 
"Comparison") -> None: - pass - - @mark_no_op - def visit_ComparisonTarget(self, node: "ComparisonTarget") -> Optional[bool]: - pass - - @mark_no_op - def visit_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def leave_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def visit_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def leave_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString(self, node: "ConcatenatedString") -> Optional[bool]: - pass - - @mark_no_op - def visit_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def visit_ConcatenatedString_whitespace_between( - self, node: "ConcatenatedString" - ) -> None: - pass - - @mark_no_op - def leave_ConcatenatedString_whitespace_between( - self, node: "ConcatenatedString" - ) -> None: - pass - - @mark_no_op - def visit_Continue(self, node: "Continue") -> Optional[bool]: - pass - - @mark_no_op - def visit_Continue_semicolon(self, node: "Continue") -> None: - pass - - @mark_no_op - def leave_Continue_semicolon(self, node: "Continue") -> None: - pass - - @mark_no_op - def visit_Decorator(self, node: "Decorator") -> Optional[bool]: - pass - - @mark_no_op - def visit_Decorator_decorator(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_decorator(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Decorator_leading_lines(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_leading_lines(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Decorator_whitespace_after_at(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_whitespace_after_at(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Decorator_trailing_whitespace(self, node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Decorator_trailing_whitespace(self, node: "Decorator") -> None: - pass - - @mark_no_op - def visit_Del(self, node: "Del") -> Optional[bool]: - pass - - @mark_no_op - def visit_Del_target(self, node: "Del") -> None: - pass - - @mark_no_op - def leave_Del_target(self, node: "Del") -> None: - pass - - @mark_no_op - def visit_Del_whitespace_after_del(self, node: "Del") -> None: - pass - - @mark_no_op - def leave_Del_whitespace_after_del(self, node: "Del") -> None: - pass - - @mark_no_op - def visit_Del_semicolon(self, node: "Del") -> None: - pass - - @mark_no_op - def leave_Del_semicolon(self, node: "Del") -> None: - pass - - @mark_no_op - def visit_Dict(self, node: "Dict") -> Optional[bool]: - pass - - @mark_no_op - def visit_Dict_elements(self, node: "Dict") -> None: - pass - - @mark_no_op - 
def leave_Dict_elements(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_lbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_lbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_rbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_rbrace(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_lpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_lpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_Dict_rpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def leave_Dict_rpar(self, node: "Dict") -> None: - pass - - @mark_no_op - def visit_DictComp(self, node: "DictComp") -> Optional[bool]: - pass - - @mark_no_op - def visit_DictComp_key(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_key(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_value(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_value(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_for_in(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_for_in(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_lbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_lbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_rbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_rbrace(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_lpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_lpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_rpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_rpar(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_whitespace_before_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_whitespace_before_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: - pass - - @mark_no_op - def visit_DictElement(self, node: "DictElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_DictElement_key(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_key(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_value(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_value(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_comma(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_comma(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def leave_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: - pass - - @mark_no_op - def visit_Divide(self, node: "Divide") -> Optional[bool]: - pass - - @mark_no_op - def visit_Divide_whitespace_before(self, node: "Divide") -> None: - pass - - @mark_no_op - def 
leave_Divide_whitespace_before(self, node: "Divide") -> None: - pass - - @mark_no_op - def visit_Divide_whitespace_after(self, node: "Divide") -> None: - pass - - @mark_no_op - def leave_Divide_whitespace_after(self, node: "Divide") -> None: - pass - - @mark_no_op - def visit_DivideAssign(self, node: "DivideAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def leave_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def visit_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def leave_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: - pass - - @mark_no_op - def visit_Dot(self, node: "Dot") -> Optional[bool]: - pass - - @mark_no_op - def visit_Dot_whitespace_before(self, node: "Dot") -> None: - pass - - @mark_no_op - def leave_Dot_whitespace_before(self, node: "Dot") -> None: - pass - - @mark_no_op - def visit_Dot_whitespace_after(self, node: "Dot") -> None: - pass - - @mark_no_op - def leave_Dot_whitespace_after(self, node: "Dot") -> None: - pass - - @mark_no_op - def visit_Element(self, node: "Element") -> Optional[bool]: - pass - - @mark_no_op - def visit_Element_value(self, node: "Element") -> None: - pass - - @mark_no_op - def leave_Element_value(self, node: "Element") -> None: - pass - - @mark_no_op - def visit_Element_comma(self, node: "Element") -> None: - pass - - @mark_no_op - def leave_Element_comma(self, node: "Element") -> None: - pass - - @mark_no_op - def visit_Ellipsis(self, node: "Ellipsis") -> Optional[bool]: - pass - - @mark_no_op - def visit_Ellipsis_lpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def leave_Ellipsis_lpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def visit_Ellipsis_rpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def leave_Ellipsis_rpar(self, node: "Ellipsis") -> None: - pass - - @mark_no_op - def visit_Else(self, node: "Else") -> Optional[bool]: - pass - - @mark_no_op - def visit_Else_body(self, node: "Else") -> None: - pass - - @mark_no_op - def leave_Else_body(self, node: "Else") -> None: - pass - - @mark_no_op - def visit_Else_leading_lines(self, node: "Else") -> None: - pass - - @mark_no_op - def leave_Else_leading_lines(self, node: "Else") -> None: - pass - - @mark_no_op - def visit_Else_whitespace_before_colon(self, node: "Else") -> None: - pass - - @mark_no_op - def leave_Else_whitespace_before_colon(self, node: "Else") -> None: - pass - - @mark_no_op - def visit_EmptyLine(self, node: "EmptyLine") -> Optional[bool]: - pass - - @mark_no_op - def visit_EmptyLine_indent(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_indent(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_EmptyLine_whitespace(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_whitespace(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_EmptyLine_comment(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_comment(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_EmptyLine_newline(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_EmptyLine_newline(self, node: "EmptyLine") -> None: - pass - - @mark_no_op - def visit_Equal(self, node: "Equal") -> Optional[bool]: - pass - - @mark_no_op - def visit_Equal_whitespace_before(self, node: "Equal") -> None: - pass - - @mark_no_op - def 
leave_Equal_whitespace_before(self, node: "Equal") -> None: - pass - - @mark_no_op - def visit_Equal_whitespace_after(self, node: "Equal") -> None: - pass - - @mark_no_op - def leave_Equal_whitespace_after(self, node: "Equal") -> None: - pass - - @mark_no_op - def visit_ExceptHandler(self, node: "ExceptHandler") -> Optional[bool]: - pass - - @mark_no_op - def visit_ExceptHandler_body(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_body(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_type(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_type(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_name(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_name(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: - pass - - @mark_no_op - def visit_ExceptHandler_whitespace_after_except( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptHandler_whitespace_after_except( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptHandler_whitespace_before_colon( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptHandler_whitespace_before_colon( - self, node: "ExceptHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler(self, node: "ExceptStarHandler") -> Optional[bool]: - pass - - @mark_no_op - def visit_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_whitespace_after_except( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_whitespace_after_except( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_whitespace_after_star( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_whitespace_after_star( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def visit_ExceptStarHandler_whitespace_before_colon( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler_whitespace_before_colon( - self, node: "ExceptStarHandler" - ) -> None: - pass - - @mark_no_op - def visit_Expr(self, node: "Expr") -> Optional[bool]: - pass - - @mark_no_op - def visit_Expr_value(self, node: "Expr") -> None: - pass - - @mark_no_op - def leave_Expr_value(self, node: "Expr") -> None: - pass - - @mark_no_op - def visit_Expr_semicolon(self, node: "Expr") -> None: - pass - - @mark_no_op 
- def leave_Expr_semicolon(self, node: "Expr") -> None: - pass - - @mark_no_op - def visit_Finally(self, node: "Finally") -> Optional[bool]: - pass - - @mark_no_op - def visit_Finally_body(self, node: "Finally") -> None: - pass - - @mark_no_op - def leave_Finally_body(self, node: "Finally") -> None: - pass - - @mark_no_op - def visit_Finally_leading_lines(self, node: "Finally") -> None: - pass - - @mark_no_op - def leave_Finally_leading_lines(self, node: "Finally") -> None: - pass - - @mark_no_op - def visit_Finally_whitespace_before_colon(self, node: "Finally") -> None: - pass - - @mark_no_op - def leave_Finally_whitespace_before_colon(self, node: "Finally") -> None: - pass - - @mark_no_op - def visit_Float(self, node: "Float") -> Optional[bool]: - pass - - @mark_no_op - def visit_Float_value(self, node: "Float") -> None: - pass - - @mark_no_op - def leave_Float_value(self, node: "Float") -> None: - pass - - @mark_no_op - def visit_Float_lpar(self, node: "Float") -> None: - pass - - @mark_no_op - def leave_Float_lpar(self, node: "Float") -> None: - pass - - @mark_no_op - def visit_Float_rpar(self, node: "Float") -> None: - pass - - @mark_no_op - def leave_Float_rpar(self, node: "Float") -> None: - pass - - @mark_no_op - def visit_FloorDivide(self, node: "FloorDivide") -> Optional[bool]: - pass - - @mark_no_op - def visit_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def leave_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def visit_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def leave_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: - pass - - @mark_no_op - def visit_FloorDivideAssign(self, node: "FloorDivideAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_FloorDivideAssign_whitespace_before( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def leave_FloorDivideAssign_whitespace_before( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def visit_FloorDivideAssign_whitespace_after( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def leave_FloorDivideAssign_whitespace_after( - self, node: "FloorDivideAssign" - ) -> None: - pass - - @mark_no_op - def visit_For(self, node: "For") -> Optional[bool]: - pass - - @mark_no_op - def visit_For_target(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_target(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_iter(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_iter(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_body(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_body(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_orelse(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_orelse(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_asynchronous(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_asynchronous(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_leading_lines(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_leading_lines(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_after_for(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_after_for(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_before_in(self, node: "For") -> None: - pass - - 
@mark_no_op - def leave_For_whitespace_before_in(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_after_in(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_after_in(self, node: "For") -> None: - pass - - @mark_no_op - def visit_For_whitespace_before_colon(self, node: "For") -> None: - pass - - @mark_no_op - def leave_For_whitespace_before_colon(self, node: "For") -> None: - pass - - @mark_no_op - def visit_FormattedString(self, node: "FormattedString") -> Optional[bool]: - pass - - @mark_no_op - def visit_FormattedString_parts(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_parts(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_start(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_start(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_end(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_end(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_lpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_lpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedString_rpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedString_rpar(self, node: "FormattedString") -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression( - self, node: "FormattedStringExpression" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_FormattedStringExpression_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_conversion( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_conversion( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_format_spec( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_format_spec( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_whitespace_before_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_whitespace_before_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_whitespace_after_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_whitespace_after_expression( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringExpression_equal( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression_equal( - self, node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_FormattedStringText(self, node: "FormattedStringText") -> Optional[bool]: - pass - - @mark_no_op - def visit_FormattedStringText_value(self, node: "FormattedStringText") -> None: - pass - - @mark_no_op - def leave_FormattedStringText_value(self, node: "FormattedStringText") -> None: - pass - - 
@mark_no_op - def visit_From(self, node: "From") -> Optional[bool]: - pass - - @mark_no_op - def visit_From_item(self, node: "From") -> None: - pass - - @mark_no_op - def leave_From_item(self, node: "From") -> None: - pass - - @mark_no_op - def visit_From_whitespace_before_from(self, node: "From") -> None: - pass - - @mark_no_op - def leave_From_whitespace_before_from(self, node: "From") -> None: - pass - - @mark_no_op - def visit_From_whitespace_after_from(self, node: "From") -> None: - pass - - @mark_no_op - def leave_From_whitespace_after_from(self, node: "From") -> None: - pass - - @mark_no_op - def visit_FunctionDef(self, node: "FunctionDef") -> Optional[bool]: - pass - - @mark_no_op - def visit_FunctionDef_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_body(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_body(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_returns(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_returns(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_lines_after_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_lines_after_decorators(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_after_type_parameters( - self, node: "FunctionDef" - ) -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_after_type_parameters( - self, node: "FunctionDef" - ) -> None: - pass - - @mark_no_op - def visit_GeneratorExp(self, node: "GeneratorExp") -> 
Optional[bool]: - pass - - @mark_no_op - def visit_GeneratorExp_elt(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_elt(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: - pass - - @mark_no_op - def visit_Global(self, node: "Global") -> Optional[bool]: - pass - - @mark_no_op - def visit_Global_names(self, node: "Global") -> None: - pass - - @mark_no_op - def leave_Global_names(self, node: "Global") -> None: - pass - - @mark_no_op - def visit_Global_whitespace_after_global(self, node: "Global") -> None: - pass - - @mark_no_op - def leave_Global_whitespace_after_global(self, node: "Global") -> None: - pass - - @mark_no_op - def visit_Global_semicolon(self, node: "Global") -> None: - pass - - @mark_no_op - def leave_Global_semicolon(self, node: "Global") -> None: - pass - - @mark_no_op - def visit_GreaterThan(self, node: "GreaterThan") -> Optional[bool]: - pass - - @mark_no_op - def visit_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def leave_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def visit_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def leave_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: - pass - - @mark_no_op - def visit_GreaterThanEqual(self, node: "GreaterThanEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_GreaterThanEqual_whitespace_before( - self, node: "GreaterThanEqual" - ) -> None: - pass - - @mark_no_op - def leave_GreaterThanEqual_whitespace_before( - self, node: "GreaterThanEqual" - ) -> None: - pass - - @mark_no_op - def visit_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: - pass - - @mark_no_op - def leave_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: - pass - - @mark_no_op - def visit_If(self, node: "If") -> Optional[bool]: - pass - - @mark_no_op - def visit_If_test(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_test(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_body(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_body(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_orelse(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_orelse(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_leading_lines(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_leading_lines(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_whitespace_before_test(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_whitespace_before_test(self, node: "If") -> None: - pass - - @mark_no_op - def visit_If_whitespace_after_test(self, node: "If") -> None: - pass - - @mark_no_op - def leave_If_whitespace_after_test(self, node: "If") -> None: - pass - - @mark_no_op - def visit_IfExp(self, node: "IfExp") -> Optional[bool]: - pass - - @mark_no_op - def visit_IfExp_test(self, node: "IfExp") -> None: 
- pass - - @mark_no_op - def leave_IfExp_test(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_body(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_body(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_orelse(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_orelse(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_lpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_lpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_rpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_rpar(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_before_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_before_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_after_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_after_if(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_before_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_before_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_IfExp_whitespace_after_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def leave_IfExp_whitespace_after_else(self, node: "IfExp") -> None: - pass - - @mark_no_op - def visit_Imaginary(self, node: "Imaginary") -> Optional[bool]: - pass - - @mark_no_op - def visit_Imaginary_value(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Imaginary_value(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def visit_Imaginary_lpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Imaginary_lpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def visit_Imaginary_rpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Imaginary_rpar(self, node: "Imaginary") -> None: - pass - - @mark_no_op - def visit_Import(self, node: "Import") -> Optional[bool]: - pass - - @mark_no_op - def visit_Import_names(self, node: "Import") -> None: - pass - - @mark_no_op - def leave_Import_names(self, node: "Import") -> None: - pass - - @mark_no_op - def visit_Import_semicolon(self, node: "Import") -> None: - pass - - @mark_no_op - def leave_Import_semicolon(self, node: "Import") -> None: - pass - - @mark_no_op - def visit_Import_whitespace_after_import(self, node: "Import") -> None: - pass - - @mark_no_op - def leave_Import_whitespace_after_import(self, node: "Import") -> None: - pass - - @mark_no_op - def visit_ImportAlias(self, node: "ImportAlias") -> Optional[bool]: - pass - - @mark_no_op - def visit_ImportAlias_name(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportAlias_name(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def visit_ImportAlias_asname(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportAlias_asname(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def visit_ImportAlias_comma(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportAlias_comma(self, node: "ImportAlias") -> None: - pass - - @mark_no_op - def visit_ImportFrom(self, node: "ImportFrom") -> Optional[bool]: - pass - - @mark_no_op - def visit_ImportFrom_module(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_module(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def 
visit_ImportFrom_names(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_names(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_relative(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_relative(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_lpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_lpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_rpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_rpar(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_semicolon(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_semicolon(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: - pass - - @mark_no_op - def visit_ImportStar(self, node: "ImportStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_In(self, node: "In") -> Optional[bool]: - pass - - @mark_no_op - def visit_In_whitespace_before(self, node: "In") -> None: - pass - - @mark_no_op - def leave_In_whitespace_before(self, node: "In") -> None: - pass - - @mark_no_op - def visit_In_whitespace_after(self, node: "In") -> None: - pass - - @mark_no_op - def leave_In_whitespace_after(self, node: "In") -> None: - pass - - @mark_no_op - def visit_IndentedBlock(self, node: "IndentedBlock") -> Optional[bool]: - pass - - @mark_no_op - def visit_IndentedBlock_body(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_body(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_IndentedBlock_header(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_header(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_IndentedBlock_indent(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_indent(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_IndentedBlock_footer(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_IndentedBlock_footer(self, node: "IndentedBlock") -> None: - pass - - @mark_no_op - def visit_Index(self, node: "Index") -> Optional[bool]: - pass - - @mark_no_op - def visit_Index_value(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_value(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Index_star(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_star(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Index_whitespace_after_star(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_whitespace_after_star(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Integer(self, node: "Integer") -> Optional[bool]: - pass - - @mark_no_op - def visit_Integer_value(self, node: "Integer") -> 
None: - pass - - @mark_no_op - def leave_Integer_value(self, node: "Integer") -> None: - pass - - @mark_no_op - def visit_Integer_lpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def leave_Integer_lpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def visit_Integer_rpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def leave_Integer_rpar(self, node: "Integer") -> None: - pass - - @mark_no_op - def visit_Is(self, node: "Is") -> Optional[bool]: - pass - - @mark_no_op - def visit_Is_whitespace_before(self, node: "Is") -> None: - pass - - @mark_no_op - def leave_Is_whitespace_before(self, node: "Is") -> None: - pass - - @mark_no_op - def visit_Is_whitespace_after(self, node: "Is") -> None: - pass - - @mark_no_op - def leave_Is_whitespace_after(self, node: "Is") -> None: - pass - - @mark_no_op - def visit_IsNot(self, node: "IsNot") -> Optional[bool]: - pass - - @mark_no_op - def visit_IsNot_whitespace_before(self, node: "IsNot") -> None: - pass - - @mark_no_op - def leave_IsNot_whitespace_before(self, node: "IsNot") -> None: - pass - - @mark_no_op - def visit_IsNot_whitespace_between(self, node: "IsNot") -> None: - pass - - @mark_no_op - def leave_IsNot_whitespace_between(self, node: "IsNot") -> None: - pass - - @mark_no_op - def visit_IsNot_whitespace_after(self, node: "IsNot") -> None: - pass - - @mark_no_op - def leave_IsNot_whitespace_after(self, node: "IsNot") -> None: - pass - - @mark_no_op - def visit_Lambda(self, node: "Lambda") -> Optional[bool]: - pass - - @mark_no_op - def visit_Lambda_params(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_params(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_body(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_body(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_colon(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_colon(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_lpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_lpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_rpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_rpar(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_Lambda_whitespace_after_lambda(self, node: "Lambda") -> None: - pass - - @mark_no_op - def leave_Lambda_whitespace_after_lambda(self, node: "Lambda") -> None: - pass - - @mark_no_op - def visit_LeftCurlyBrace(self, node: "LeftCurlyBrace") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: - pass - - @mark_no_op - def leave_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: - pass - - @mark_no_op - def visit_LeftParen(self, node: "LeftParen") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftParen_whitespace_after(self, node: "LeftParen") -> None: - pass - - @mark_no_op - def leave_LeftParen_whitespace_after(self, node: "LeftParen") -> None: - pass - - @mark_no_op - def visit_LeftShift(self, node: "LeftShift") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftShift_whitespace_before(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def leave_LeftShift_whitespace_before(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def visit_LeftShift_whitespace_after(self, node: "LeftShift") -> None: - pass - - @mark_no_op - def leave_LeftShift_whitespace_after(self, node: "LeftShift") -> 
None: - pass - - @mark_no_op - def visit_LeftShiftAssign(self, node: "LeftShiftAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def leave_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def visit_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def leave_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def visit_LeftSquareBracket(self, node: "LeftSquareBracket") -> Optional[bool]: - pass - - @mark_no_op - def visit_LeftSquareBracket_whitespace_after( - self, node: "LeftSquareBracket" - ) -> None: - pass - - @mark_no_op - def leave_LeftSquareBracket_whitespace_after( - self, node: "LeftSquareBracket" - ) -> None: - pass - - @mark_no_op - def visit_LessThan(self, node: "LessThan") -> Optional[bool]: - pass - - @mark_no_op - def visit_LessThan_whitespace_before(self, node: "LessThan") -> None: - pass - - @mark_no_op - def leave_LessThan_whitespace_before(self, node: "LessThan") -> None: - pass - - @mark_no_op - def visit_LessThan_whitespace_after(self, node: "LessThan") -> None: - pass - - @mark_no_op - def leave_LessThan_whitespace_after(self, node: "LessThan") -> None: - pass - - @mark_no_op - def visit_LessThanEqual(self, node: "LessThanEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def leave_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def visit_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def leave_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: - pass - - @mark_no_op - def visit_List(self, node: "List") -> Optional[bool]: - pass - - @mark_no_op - def visit_List_elements(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_elements(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_lbracket(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_lbracket(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_rbracket(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_rbracket(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_lpar(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_lpar(self, node: "List") -> None: - pass - - @mark_no_op - def visit_List_rpar(self, node: "List") -> None: - pass - - @mark_no_op - def leave_List_rpar(self, node: "List") -> None: - pass - - @mark_no_op - def visit_ListComp(self, node: "ListComp") -> Optional[bool]: - pass - - @mark_no_op - def visit_ListComp_elt(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_elt(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_for_in(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_for_in(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_lbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_lbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_rbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_rbracket(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_lpar(self, node: "ListComp") -> None: - pass 
- - @mark_no_op - def leave_ListComp_lpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_ListComp_rpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def leave_ListComp_rpar(self, node: "ListComp") -> None: - pass - - @mark_no_op - def visit_Match(self, node: "Match") -> Optional[bool]: - pass - - @mark_no_op - def visit_Match_subject(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_subject(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_cases(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_cases(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_leading_lines(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_leading_lines(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_whitespace_after_match(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_whitespace_after_match(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_whitespace_before_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_whitespace_before_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_whitespace_after_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_whitespace_after_colon(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_indent(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_indent(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_Match_footer(self, node: "Match") -> None: - pass - - @mark_no_op - def leave_Match_footer(self, node: "Match") -> None: - pass - - @mark_no_op - def visit_MatchAs(self, node: "MatchAs") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchAs_pattern(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_pattern(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_name(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_name(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_lpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_lpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchAs_rpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchAs_rpar(self, node: "MatchAs") -> None: - pass - - @mark_no_op - def visit_MatchCase(self, node: "MatchCase") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchCase_pattern(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_pattern(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_body(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_body(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_guard(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_guard(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_leading_lines(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_leading_lines(self, 
node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: - pass - - @mark_no_op - def visit_MatchClass(self, node: "MatchClass") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchClass_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_lpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_lpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchClass_rpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchClass_rpar(self, node: "MatchClass") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement(self, node: "MatchKeywordElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_whitespace_before_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement_whitespace_before_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchKeywordElement_whitespace_after_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - 
@mark_no_op - def leave_MatchKeywordElement_whitespace_after_equal( - self, node: "MatchKeywordElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchList(self, node: "MatchList") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchList_patterns(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_patterns(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_lbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_lbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_rbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_rbracket(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_lpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_lpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchList_rpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchList_rpar(self, node: "MatchList") -> None: - pass - - @mark_no_op - def visit_MatchMapping(self, node: "MatchMapping") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchMapping_elements(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_elements(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_lbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_lbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_rbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_rbrace(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_lpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_lpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMapping_rpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMapping_rpar(self, node: "MatchMapping") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement(self, node: "MatchMappingElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_whitespace_before_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - 
- @mark_no_op - def leave_MatchMappingElement_whitespace_before_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchMappingElement_whitespace_after_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def leave_MatchMappingElement_whitespace_after_colon( - self, node: "MatchMappingElement" - ) -> None: - pass - - @mark_no_op - def visit_MatchOr(self, node: "MatchOr") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchOr_patterns(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOr_patterns(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def visit_MatchOr_lpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOr_lpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def visit_MatchOr_rpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOr_rpar(self, node: "MatchOr") -> None: - pass - - @mark_no_op - def visit_MatchOrElement(self, node: "MatchOrElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def leave_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def visit_MatchOrElement_separator(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def leave_MatchOrElement_separator(self, node: "MatchOrElement") -> None: - pass - - @mark_no_op - def visit_MatchPattern(self, node: "MatchPattern") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSequence(self, node: "MatchSequence") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSequenceElement( - self, node: "MatchSequenceElement" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def leave_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def visit_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def leave_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def visit_MatchSingleton(self, node: "MatchSingleton") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchSingleton_value(self, node: "MatchSingleton") -> None: - pass - - @mark_no_op - def leave_MatchSingleton_value(self, node: "MatchSingleton") -> None: - pass - - @mark_no_op - def visit_MatchStar(self, node: "MatchStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchStar_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchStar_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def visit_MatchStar_comma(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchStar_comma(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def visit_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: - pass - - @mark_no_op - def visit_MatchTuple(self, node: "MatchTuple") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchTuple_patterns(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchTuple_patterns(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def visit_MatchTuple_lpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchTuple_lpar(self, node: "MatchTuple") -> None: - pass - - 
@mark_no_op - def visit_MatchTuple_rpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchTuple_rpar(self, node: "MatchTuple") -> None: - pass - - @mark_no_op - def visit_MatchValue(self, node: "MatchValue") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatchValue_value(self, node: "MatchValue") -> None: - pass - - @mark_no_op - def leave_MatchValue_value(self, node: "MatchValue") -> None: - pass - - @mark_no_op - def visit_MatrixMultiply(self, node: "MatrixMultiply") -> Optional[bool]: - pass - - @mark_no_op - def visit_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def leave_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def visit_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def leave_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def visit_MatrixMultiplyAssign( - self, node: "MatrixMultiplyAssign" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_MatrixMultiplyAssign_whitespace_before( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def leave_MatrixMultiplyAssign_whitespace_before( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def visit_MatrixMultiplyAssign_whitespace_after( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def leave_MatrixMultiplyAssign_whitespace_after( - self, node: "MatrixMultiplyAssign" - ) -> None: - pass - - @mark_no_op - def visit_Minus(self, node: "Minus") -> Optional[bool]: - pass - - @mark_no_op - def visit_Minus_whitespace_after(self, node: "Minus") -> None: - pass - - @mark_no_op - def leave_Minus_whitespace_after(self, node: "Minus") -> None: - pass - - @mark_no_op - def visit_Module(self, node: "Module") -> Optional[bool]: - pass - - @mark_no_op - def visit_Module_body(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_body(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_header(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_header(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_footer(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_footer(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_encoding(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_encoding(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_default_indent(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_default_indent(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_default_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_default_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Module_has_trailing_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def leave_Module_has_trailing_newline(self, node: "Module") -> None: - pass - - @mark_no_op - def visit_Modulo(self, node: "Modulo") -> Optional[bool]: - pass - - @mark_no_op - def visit_Modulo_whitespace_before(self, node: "Modulo") -> None: - pass - - @mark_no_op - def leave_Modulo_whitespace_before(self, node: "Modulo") -> None: - pass - - @mark_no_op - def visit_Modulo_whitespace_after(self, node: "Modulo") -> None: - pass - - @mark_no_op - def leave_Modulo_whitespace_after(self, node: "Modulo") -> None: - pass - - 
@mark_no_op - def visit_ModuloAssign(self, node: "ModuloAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def leave_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def visit_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def leave_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: - pass - - @mark_no_op - def visit_Multiply(self, node: "Multiply") -> Optional[bool]: - pass - - @mark_no_op - def visit_Multiply_whitespace_before(self, node: "Multiply") -> None: - pass - - @mark_no_op - def leave_Multiply_whitespace_before(self, node: "Multiply") -> None: - pass - - @mark_no_op - def visit_Multiply_whitespace_after(self, node: "Multiply") -> None: - pass - - @mark_no_op - def leave_Multiply_whitespace_after(self, node: "Multiply") -> None: - pass - - @mark_no_op - def visit_MultiplyAssign(self, node: "MultiplyAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def leave_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def visit_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def leave_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def visit_Name(self, node: "Name") -> Optional[bool]: - pass - - @mark_no_op - def visit_Name_value(self, node: "Name") -> None: - pass - - @mark_no_op - def leave_Name_value(self, node: "Name") -> None: - pass - - @mark_no_op - def visit_Name_lpar(self, node: "Name") -> None: - pass - - @mark_no_op - def leave_Name_lpar(self, node: "Name") -> None: - pass - - @mark_no_op - def visit_Name_rpar(self, node: "Name") -> None: - pass - - @mark_no_op - def leave_Name_rpar(self, node: "Name") -> None: - pass - - @mark_no_op - def visit_NameItem(self, node: "NameItem") -> Optional[bool]: - pass - - @mark_no_op - def visit_NameItem_name(self, node: "NameItem") -> None: - pass - - @mark_no_op - def leave_NameItem_name(self, node: "NameItem") -> None: - pass - - @mark_no_op - def visit_NameItem_comma(self, node: "NameItem") -> None: - pass - - @mark_no_op - def leave_NameItem_comma(self, node: "NameItem") -> None: - pass - - @mark_no_op - def visit_NamedExpr(self, node: "NamedExpr") -> Optional[bool]: - pass - - @mark_no_op - def visit_NamedExpr_target(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_target(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_value(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_value(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_lpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_lpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_rpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_rpar(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def 
leave_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: - pass - - @mark_no_op - def visit_Newline(self, node: "Newline") -> Optional[bool]: - pass - - @mark_no_op - def visit_Newline_value(self, node: "Newline") -> None: - pass - - @mark_no_op - def leave_Newline_value(self, node: "Newline") -> None: - pass - - @mark_no_op - def visit_Nonlocal(self, node: "Nonlocal") -> Optional[bool]: - pass - - @mark_no_op - def visit_Nonlocal_names(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Nonlocal_names(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def visit_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def visit_Nonlocal_semicolon(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Nonlocal_semicolon(self, node: "Nonlocal") -> None: - pass - - @mark_no_op - def visit_Not(self, node: "Not") -> Optional[bool]: - pass - - @mark_no_op - def visit_Not_whitespace_after(self, node: "Not") -> None: - pass - - @mark_no_op - def leave_Not_whitespace_after(self, node: "Not") -> None: - pass - - @mark_no_op - def visit_NotEqual(self, node: "NotEqual") -> Optional[bool]: - pass - - @mark_no_op - def visit_NotEqual_value(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotEqual_value(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def visit_NotEqual_whitespace_before(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotEqual_whitespace_before(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def visit_NotEqual_whitespace_after(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotEqual_whitespace_after(self, node: "NotEqual") -> None: - pass - - @mark_no_op - def visit_NotIn(self, node: "NotIn") -> Optional[bool]: - pass - - @mark_no_op - def visit_NotIn_whitespace_before(self, node: "NotIn") -> None: - pass - - @mark_no_op - def leave_NotIn_whitespace_before(self, node: "NotIn") -> None: - pass - - @mark_no_op - def visit_NotIn_whitespace_between(self, node: "NotIn") -> None: - pass - - @mark_no_op - def leave_NotIn_whitespace_between(self, node: "NotIn") -> None: - pass - - @mark_no_op - def visit_NotIn_whitespace_after(self, node: "NotIn") -> None: - pass - - @mark_no_op - def leave_NotIn_whitespace_after(self, node: "NotIn") -> None: - pass - - @mark_no_op - def visit_Or(self, node: "Or") -> Optional[bool]: - pass - - @mark_no_op - def visit_Or_whitespace_before(self, node: "Or") -> None: - pass - - @mark_no_op - def leave_Or_whitespace_before(self, node: "Or") -> None: - pass - - @mark_no_op - def visit_Or_whitespace_after(self, node: "Or") -> None: - pass - - @mark_no_op - def leave_Or_whitespace_after(self, node: "Or") -> None: - pass - - @mark_no_op - def visit_Param(self, node: "Param") -> Optional[bool]: - pass - - @mark_no_op - def visit_Param_name(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_name(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_annotation(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_annotation(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_equal(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_equal(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_default(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_default(self, node: "Param") -> 
None: - pass - - @mark_no_op - def visit_Param_comma(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_comma(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_star(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_star(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_whitespace_after_star(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_whitespace_after_star(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_Param_whitespace_after_param(self, node: "Param") -> None: - pass - - @mark_no_op - def leave_Param_whitespace_after_param(self, node: "Param") -> None: - pass - - @mark_no_op - def visit_ParamSlash(self, node: "ParamSlash") -> Optional[bool]: - pass - - @mark_no_op - def visit_ParamSlash_comma(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamSlash_comma(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def visit_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def visit_ParamSpec(self, node: "ParamSpec") -> Optional[bool]: - pass - - @mark_no_op - def visit_ParamSpec_name(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def leave_ParamSpec_name(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def visit_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def leave_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def visit_ParamStar(self, node: "ParamStar") -> Optional[bool]: - pass - - @mark_no_op - def visit_ParamStar_comma(self, node: "ParamStar") -> None: - pass - - @mark_no_op - def leave_ParamStar_comma(self, node: "ParamStar") -> None: - pass - - @mark_no_op - def visit_Parameters(self, node: "Parameters") -> Optional[bool]: - pass - - @mark_no_op - def visit_Parameters_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_star_arg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_star_arg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_kwonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_kwonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_star_kwarg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_star_kwarg(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_posonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_posonly_params(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_Parameters_posonly_ind(self, node: "Parameters") -> None: - pass - - @mark_no_op - def leave_Parameters_posonly_ind(self, node: "Parameters") -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace( - self, node: "ParenthesizedWhitespace" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_first_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_first_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_empty_lines( - self, node: "ParenthesizedWhitespace" - ) -> 
None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_empty_lines( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_indent( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_indent( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_ParenthesizedWhitespace_last_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace_last_line( - self, node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def visit_Pass(self, node: "Pass") -> Optional[bool]: - pass - - @mark_no_op - def visit_Pass_semicolon(self, node: "Pass") -> None: - pass - - @mark_no_op - def leave_Pass_semicolon(self, node: "Pass") -> None: - pass - - @mark_no_op - def visit_Plus(self, node: "Plus") -> Optional[bool]: - pass - - @mark_no_op - def visit_Plus_whitespace_after(self, node: "Plus") -> None: - pass - - @mark_no_op - def leave_Plus_whitespace_after(self, node: "Plus") -> None: - pass - - @mark_no_op - def visit_Power(self, node: "Power") -> Optional[bool]: - pass - - @mark_no_op - def visit_Power_whitespace_before(self, node: "Power") -> None: - pass - - @mark_no_op - def leave_Power_whitespace_before(self, node: "Power") -> None: - pass - - @mark_no_op - def visit_Power_whitespace_after(self, node: "Power") -> None: - pass - - @mark_no_op - def leave_Power_whitespace_after(self, node: "Power") -> None: - pass - - @mark_no_op - def visit_PowerAssign(self, node: "PowerAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def leave_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def visit_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def leave_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: - pass - - @mark_no_op - def visit_Raise(self, node: "Raise") -> Optional[bool]: - pass - - @mark_no_op - def visit_Raise_exc(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_exc(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Raise_cause(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_cause(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Raise_whitespace_after_raise(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_whitespace_after_raise(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Raise_semicolon(self, node: "Raise") -> None: - pass - - @mark_no_op - def leave_Raise_semicolon(self, node: "Raise") -> None: - pass - - @mark_no_op - def visit_Return(self, node: "Return") -> Optional[bool]: - pass - - @mark_no_op - def visit_Return_value(self, node: "Return") -> None: - pass - - @mark_no_op - def leave_Return_value(self, node: "Return") -> None: - pass - - @mark_no_op - def visit_Return_whitespace_after_return(self, node: "Return") -> None: - pass - - @mark_no_op - def leave_Return_whitespace_after_return(self, node: "Return") -> None: - pass - - @mark_no_op - def visit_Return_semicolon(self, node: "Return") -> None: - pass - - @mark_no_op - def leave_Return_semicolon(self, node: "Return") -> None: - pass - - @mark_no_op - def visit_RightCurlyBrace(self, node: "RightCurlyBrace") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightCurlyBrace_whitespace_before(self, node: 
"RightCurlyBrace") -> None: - pass - - @mark_no_op - def leave_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: - pass - - @mark_no_op - def visit_RightParen(self, node: "RightParen") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightParen_whitespace_before(self, node: "RightParen") -> None: - pass - - @mark_no_op - def leave_RightParen_whitespace_before(self, node: "RightParen") -> None: - pass - - @mark_no_op - def visit_RightShift(self, node: "RightShift") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightShift_whitespace_before(self, node: "RightShift") -> None: - pass - - @mark_no_op - def leave_RightShift_whitespace_before(self, node: "RightShift") -> None: - pass - - @mark_no_op - def visit_RightShift_whitespace_after(self, node: "RightShift") -> None: - pass - - @mark_no_op - def leave_RightShift_whitespace_after(self, node: "RightShift") -> None: - pass - - @mark_no_op - def visit_RightShiftAssign(self, node: "RightShiftAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightShiftAssign_whitespace_before( - self, node: "RightShiftAssign" - ) -> None: - pass - - @mark_no_op - def leave_RightShiftAssign_whitespace_before( - self, node: "RightShiftAssign" - ) -> None: - pass - - @mark_no_op - def visit_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: - pass - - @mark_no_op - def leave_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: - pass - - @mark_no_op - def visit_RightSquareBracket(self, node: "RightSquareBracket") -> Optional[bool]: - pass - - @mark_no_op - def visit_RightSquareBracket_whitespace_before( - self, node: "RightSquareBracket" - ) -> None: - pass - - @mark_no_op - def leave_RightSquareBracket_whitespace_before( - self, node: "RightSquareBracket" - ) -> None: - pass - - @mark_no_op - def visit_Semicolon(self, node: "Semicolon") -> Optional[bool]: - pass - - @mark_no_op - def visit_Semicolon_whitespace_before(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def leave_Semicolon_whitespace_before(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def visit_Semicolon_whitespace_after(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def leave_Semicolon_whitespace_after(self, node: "Semicolon") -> None: - pass - - @mark_no_op - def visit_Set(self, node: "Set") -> Optional[bool]: - pass - - @mark_no_op - def visit_Set_elements(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_elements(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_lbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_lbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_rbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_rbrace(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_lpar(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_lpar(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_Set_rpar(self, node: "Set") -> None: - pass - - @mark_no_op - def leave_Set_rpar(self, node: "Set") -> None: - pass - - @mark_no_op - def visit_SetComp(self, node: "SetComp") -> Optional[bool]: - pass - - @mark_no_op - def visit_SetComp_elt(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_elt(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_for_in(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_for_in(self, node: "SetComp") -> None: - pass - - @mark_no_op - def 
visit_SetComp_lbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_lbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_rbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_rbrace(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_lpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_lpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SetComp_rpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SetComp_rpar(self, node: "SetComp") -> None: - pass - - @mark_no_op - def visit_SimpleStatementLine(self, node: "SimpleStatementLine") -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: - pass - - @mark_no_op - def visit_SimpleStatementLine_leading_lines( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine_leading_lines( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def visit_SimpleStatementLine_trailing_whitespace( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine_trailing_whitespace( - self, node: "SimpleStatementLine" - ) -> None: - pass - - @mark_no_op - def visit_SimpleStatementSuite( - self, node: "SimpleStatementSuite" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: - pass - - @mark_no_op - def visit_SimpleStatementSuite_leading_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite_leading_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def visit_SimpleStatementSuite_trailing_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite_trailing_whitespace( - self, node: "SimpleStatementSuite" - ) -> None: - pass - - @mark_no_op - def visit_SimpleString(self, node: "SimpleString") -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleString_value(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleString_value(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def visit_SimpleString_lpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleString_lpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def visit_SimpleString_rpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleString_rpar(self, node: "SimpleString") -> None: - pass - - @mark_no_op - def visit_SimpleWhitespace(self, node: "SimpleWhitespace") -> Optional[bool]: - pass - - @mark_no_op - def visit_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: - pass - - @mark_no_op - def leave_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: - pass - - @mark_no_op - def visit_Slice(self, node: "Slice") -> Optional[bool]: - pass - - @mark_no_op - def visit_Slice_lower(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_lower(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_upper(self, node: "Slice") -> None: - pass - - @mark_no_op - def 
leave_Slice_upper(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_step(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_step(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_first_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_first_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_Slice_second_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def leave_Slice_second_colon(self, node: "Slice") -> None: - pass - - @mark_no_op - def visit_StarredDictElement(self, node: "StarredDictElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_StarredDictElement_value(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def leave_StarredDictElement_value(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def visit_StarredDictElement_comma(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def leave_StarredDictElement_comma(self, node: "StarredDictElement") -> None: - pass - - @mark_no_op - def visit_StarredDictElement_whitespace_before_value( - self, node: "StarredDictElement" - ) -> None: - pass - - @mark_no_op - def leave_StarredDictElement_whitespace_before_value( - self, node: "StarredDictElement" - ) -> None: - pass - - @mark_no_op - def visit_StarredElement(self, node: "StarredElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_StarredElement_value(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_value(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_comma(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_comma(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_lpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_lpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_rpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_StarredElement_rpar(self, node: "StarredElement") -> None: - pass - - @mark_no_op - def visit_StarredElement_whitespace_before_value( - self, node: "StarredElement" - ) -> None: - pass - - @mark_no_op - def leave_StarredElement_whitespace_before_value( - self, node: "StarredElement" - ) -> None: - pass - - @mark_no_op - def visit_Subscript(self, node: "Subscript") -> Optional[bool]: - pass - - @mark_no_op - def visit_Subscript_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_slice(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_slice(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_lbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_lbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_rbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_rbracket(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_lpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_lpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_Subscript_rpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_rpar(self, node: "Subscript") -> None: - pass - - @mark_no_op - def 
visit_Subscript_whitespace_after_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def leave_Subscript_whitespace_after_value(self, node: "Subscript") -> None: - pass - - @mark_no_op - def visit_SubscriptElement(self, node: "SubscriptElement") -> Optional[bool]: - pass - - @mark_no_op - def visit_SubscriptElement_slice(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def leave_SubscriptElement_slice(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def visit_SubscriptElement_comma(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def leave_SubscriptElement_comma(self, node: "SubscriptElement") -> None: - pass - - @mark_no_op - def visit_Subtract(self, node: "Subtract") -> Optional[bool]: - pass - - @mark_no_op - def visit_Subtract_whitespace_before(self, node: "Subtract") -> None: - pass - - @mark_no_op - def leave_Subtract_whitespace_before(self, node: "Subtract") -> None: - pass - - @mark_no_op - def visit_Subtract_whitespace_after(self, node: "Subtract") -> None: - pass - - @mark_no_op - def leave_Subtract_whitespace_after(self, node: "Subtract") -> None: - pass - - @mark_no_op - def visit_SubtractAssign(self, node: "SubtractAssign") -> Optional[bool]: - pass - - @mark_no_op - def visit_SubtractAssign_whitespace_before(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def leave_SubtractAssign_whitespace_before(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def visit_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def leave_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: - pass - - @mark_no_op - def visit_TrailingWhitespace(self, node: "TrailingWhitespace") -> Optional[bool]: - pass - - @mark_no_op - def visit_TrailingWhitespace_whitespace(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace_whitespace(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def visit_TrailingWhitespace_comment(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace_comment(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def visit_TrailingWhitespace_newline(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace_newline(self, node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def visit_Try(self, node: "Try") -> Optional[bool]: - pass - - @mark_no_op - def visit_Try_body(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_body(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_handlers(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_handlers(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_orelse(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_orelse(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_finalbody(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_finalbody(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_leading_lines(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_leading_lines(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_Try_whitespace_before_colon(self, node: "Try") -> None: - pass - - @mark_no_op - def leave_Try_whitespace_before_colon(self, node: "Try") -> None: - pass - - @mark_no_op - def visit_TryStar(self, node: "TryStar") -> Optional[bool]: - pass - - @mark_no_op - def 
visit_TryStar_body(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_body(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_handlers(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_handlers(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_orelse(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_orelse(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_finalbody(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_finalbody(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_leading_lines(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_leading_lines(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: - pass - - @mark_no_op - def leave_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: - pass - - @mark_no_op - def visit_Tuple(self, node: "Tuple") -> Optional[bool]: - pass - - @mark_no_op - def visit_Tuple_elements(self, node: "Tuple") -> None: - pass - - @mark_no_op - def leave_Tuple_elements(self, node: "Tuple") -> None: - pass - - @mark_no_op - def visit_Tuple_lpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def leave_Tuple_lpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def visit_Tuple_rpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def leave_Tuple_rpar(self, node: "Tuple") -> None: - pass - - @mark_no_op - def visit_TypeAlias(self, node: "TypeAlias") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeAlias_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_value(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_value(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_type_parameters( - self, node: "TypeAlias" - ) -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_type_parameters( - self, node: "TypeAlias" - ) -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_semicolon(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_semicolon(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeParam(self, node: "TypeParam") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeParam_param(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_param(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParam_comma(self, node: "TypeParam") -> None: - pass - - 
@mark_no_op - def leave_TypeParam_comma(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParameters(self, node: "TypeParameters") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeParameters_params(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeParameters_params(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def visit_TypeParameters_lbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeParameters_lbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def visit_TypeParameters_rbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeParameters_rbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def visit_TypeVar(self, node: "TypeVar") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeVar_name(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVar_name(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def visit_TypeVar_bound(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVar_bound(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def visit_TypeVar_colon(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVar_colon(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def visit_TypeVarTuple(self, node: "TypeVarTuple") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def leave_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def visit_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def leave_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def visit_UnaryOperation(self, node: "UnaryOperation") -> Optional[bool]: - pass - - @mark_no_op - def visit_UnaryOperation_operator(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_operator(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_UnaryOperation_expression(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_expression(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_UnaryOperation_lpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_lpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_UnaryOperation_rpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_UnaryOperation_rpar(self, node: "UnaryOperation") -> None: - pass - - @mark_no_op - def visit_While(self, node: "While") -> Optional[bool]: - pass - - @mark_no_op - def visit_While_test(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_test(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_body(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_body(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_orelse(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_orelse(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_leading_lines(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_leading_lines(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_whitespace_after_while(self, node: "While") -> None: - pass - - @mark_no_op - def 
leave_While_whitespace_after_while(self, node: "While") -> None: - pass - - @mark_no_op - def visit_While_whitespace_before_colon(self, node: "While") -> None: - pass - - @mark_no_op - def leave_While_whitespace_before_colon(self, node: "While") -> None: - pass - - @mark_no_op - def visit_With(self, node: "With") -> Optional[bool]: - pass - - @mark_no_op - def visit_With_items(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_items(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_body(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_body(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_asynchronous(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_asynchronous(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_leading_lines(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_leading_lines(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_lpar(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_lpar(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_rpar(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_rpar(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_whitespace_after_with(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_whitespace_after_with(self, node: "With") -> None: - pass - - @mark_no_op - def visit_With_whitespace_before_colon(self, node: "With") -> None: - pass - - @mark_no_op - def leave_With_whitespace_before_colon(self, node: "With") -> None: - pass - - @mark_no_op - def visit_WithItem(self, node: "WithItem") -> Optional[bool]: - pass - - @mark_no_op - def visit_WithItem_item(self, node: "WithItem") -> None: - pass - - @mark_no_op - def leave_WithItem_item(self, node: "WithItem") -> None: - pass - - @mark_no_op - def visit_WithItem_asname(self, node: "WithItem") -> None: - pass - - @mark_no_op - def leave_WithItem_asname(self, node: "WithItem") -> None: - pass - - @mark_no_op - def visit_WithItem_comma(self, node: "WithItem") -> None: - pass - - @mark_no_op - def leave_WithItem_comma(self, node: "WithItem") -> None: - pass - - @mark_no_op - def visit_Yield(self, node: "Yield") -> Optional[bool]: - pass - - @mark_no_op - def visit_Yield_value(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_value(self, node: "Yield") -> None: - pass - - @mark_no_op - def visit_Yield_lpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_lpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def visit_Yield_rpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_rpar(self, node: "Yield") -> None: - pass - - @mark_no_op - def visit_Yield_whitespace_after_yield(self, node: "Yield") -> None: - pass - - @mark_no_op - def leave_Yield_whitespace_after_yield(self, node: "Yield") -> None: - pass - - -class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): - @mark_no_op - def leave_Add(self, original_node: "Add") -> None: - pass - - @mark_no_op - def leave_AddAssign(self, original_node: "AddAssign") -> None: - pass - - @mark_no_op - def leave_And(self, original_node: "And") -> None: - pass - - @mark_no_op - def leave_AnnAssign(self, original_node: "AnnAssign") -> None: - pass - - @mark_no_op - def leave_Annotation(self, original_node: "Annotation") -> None: - pass - - @mark_no_op - def leave_Arg(self, original_node: "Arg") -> None: - pass - - @mark_no_op - def leave_AsName(self, original_node: 
"AsName") -> None: - pass - - @mark_no_op - def leave_Assert(self, original_node: "Assert") -> None: - pass - - @mark_no_op - def leave_Assign(self, original_node: "Assign") -> None: - pass - - @mark_no_op - def leave_AssignEqual(self, original_node: "AssignEqual") -> None: - pass - - @mark_no_op - def leave_AssignTarget(self, original_node: "AssignTarget") -> None: - pass - - @mark_no_op - def leave_Asynchronous(self, original_node: "Asynchronous") -> None: - pass - - @mark_no_op - def leave_Attribute(self, original_node: "Attribute") -> None: - pass - - @mark_no_op - def leave_AugAssign(self, original_node: "AugAssign") -> None: - pass - - @mark_no_op - def leave_Await(self, original_node: "Await") -> None: - pass - - @mark_no_op - def leave_BinaryOperation(self, original_node: "BinaryOperation") -> None: - pass - - @mark_no_op - def leave_BitAnd(self, original_node: "BitAnd") -> None: - pass - - @mark_no_op - def leave_BitAndAssign(self, original_node: "BitAndAssign") -> None: - pass - - @mark_no_op - def leave_BitInvert(self, original_node: "BitInvert") -> None: - pass - - @mark_no_op - def leave_BitOr(self, original_node: "BitOr") -> None: - pass - - @mark_no_op - def leave_BitOrAssign(self, original_node: "BitOrAssign") -> None: - pass - - @mark_no_op - def leave_BitXor(self, original_node: "BitXor") -> None: - pass - - @mark_no_op - def leave_BitXorAssign(self, original_node: "BitXorAssign") -> None: - pass - - @mark_no_op - def leave_BooleanOperation(self, original_node: "BooleanOperation") -> None: - pass - - @mark_no_op - def leave_Break(self, original_node: "Break") -> None: - pass - - @mark_no_op - def leave_Call(self, original_node: "Call") -> None: - pass - - @mark_no_op - def leave_ClassDef(self, original_node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_Colon(self, original_node: "Colon") -> None: - pass - - @mark_no_op - def leave_Comma(self, original_node: "Comma") -> None: - pass - - @mark_no_op - def leave_Comment(self, original_node: "Comment") -> None: - pass - - @mark_no_op - def leave_CompFor(self, original_node: "CompFor") -> None: - pass - - @mark_no_op - def leave_CompIf(self, original_node: "CompIf") -> None: - pass - - @mark_no_op - def leave_Comparison(self, original_node: "Comparison") -> None: - pass - - @mark_no_op - def leave_ComparisonTarget(self, original_node: "ComparisonTarget") -> None: - pass - - @mark_no_op - def leave_ConcatenatedString(self, original_node: "ConcatenatedString") -> None: - pass - - @mark_no_op - def leave_Continue(self, original_node: "Continue") -> None: - pass - - @mark_no_op - def leave_Decorator(self, original_node: "Decorator") -> None: - pass - - @mark_no_op - def leave_Del(self, original_node: "Del") -> None: - pass - - @mark_no_op - def leave_Dict(self, original_node: "Dict") -> None: - pass - - @mark_no_op - def leave_DictComp(self, original_node: "DictComp") -> None: - pass - - @mark_no_op - def leave_DictElement(self, original_node: "DictElement") -> None: - pass - - @mark_no_op - def leave_Divide(self, original_node: "Divide") -> None: - pass - - @mark_no_op - def leave_DivideAssign(self, original_node: "DivideAssign") -> None: - pass - - @mark_no_op - def leave_Dot(self, original_node: "Dot") -> None: - pass - - @mark_no_op - def leave_Element(self, original_node: "Element") -> None: - pass - - @mark_no_op - def leave_Ellipsis(self, original_node: "Ellipsis") -> None: - pass - - @mark_no_op - def leave_Else(self, original_node: "Else") -> None: - pass - - @mark_no_op - def leave_EmptyLine(self, 
original_node: "EmptyLine") -> None: - pass - - @mark_no_op - def leave_Equal(self, original_node: "Equal") -> None: - pass - - @mark_no_op - def leave_ExceptHandler(self, original_node: "ExceptHandler") -> None: - pass - - @mark_no_op - def leave_ExceptStarHandler(self, original_node: "ExceptStarHandler") -> None: - pass - - @mark_no_op - def leave_Expr(self, original_node: "Expr") -> None: - pass - - @mark_no_op - def leave_Finally(self, original_node: "Finally") -> None: - pass - - @mark_no_op - def leave_Float(self, original_node: "Float") -> None: - pass - - @mark_no_op - def leave_FloorDivide(self, original_node: "FloorDivide") -> None: - pass - - @mark_no_op - def leave_FloorDivideAssign(self, original_node: "FloorDivideAssign") -> None: - pass - - @mark_no_op - def leave_For(self, original_node: "For") -> None: - pass - - @mark_no_op - def leave_FormattedString(self, original_node: "FormattedString") -> None: - pass - - @mark_no_op - def leave_FormattedStringExpression( - self, original_node: "FormattedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_FormattedStringText(self, original_node: "FormattedStringText") -> None: - pass - - @mark_no_op - def leave_From(self, original_node: "From") -> None: - pass - - @mark_no_op - def leave_FunctionDef(self, original_node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_GeneratorExp(self, original_node: "GeneratorExp") -> None: - pass - - @mark_no_op - def leave_Global(self, original_node: "Global") -> None: - pass - - @mark_no_op - def leave_GreaterThan(self, original_node: "GreaterThan") -> None: - pass - - @mark_no_op - def leave_GreaterThanEqual(self, original_node: "GreaterThanEqual") -> None: - pass - - @mark_no_op - def leave_If(self, original_node: "If") -> None: - pass - - @mark_no_op - def leave_IfExp(self, original_node: "IfExp") -> None: - pass - - @mark_no_op - def leave_Imaginary(self, original_node: "Imaginary") -> None: - pass - - @mark_no_op - def leave_Import(self, original_node: "Import") -> None: - pass - - @mark_no_op - def leave_ImportAlias(self, original_node: "ImportAlias") -> None: - pass - - @mark_no_op - def leave_ImportFrom(self, original_node: "ImportFrom") -> None: - pass - - @mark_no_op - def leave_ImportStar(self, original_node: "ImportStar") -> None: - pass - - @mark_no_op - def leave_In(self, original_node: "In") -> None: - pass - - @mark_no_op - def leave_IndentedBlock(self, original_node: "IndentedBlock") -> None: - pass - - @mark_no_op - def leave_Index(self, original_node: "Index") -> None: - pass - - @mark_no_op - def leave_Integer(self, original_node: "Integer") -> None: - pass - - @mark_no_op - def leave_Is(self, original_node: "Is") -> None: - pass - - @mark_no_op - def leave_IsNot(self, original_node: "IsNot") -> None: - pass - - @mark_no_op - def leave_Lambda(self, original_node: "Lambda") -> None: - pass - - @mark_no_op - def leave_LeftCurlyBrace(self, original_node: "LeftCurlyBrace") -> None: - pass - - @mark_no_op - def leave_LeftParen(self, original_node: "LeftParen") -> None: - pass - - @mark_no_op - def leave_LeftShift(self, original_node: "LeftShift") -> None: - pass - - @mark_no_op - def leave_LeftShiftAssign(self, original_node: "LeftShiftAssign") -> None: - pass - - @mark_no_op - def leave_LeftSquareBracket(self, original_node: "LeftSquareBracket") -> None: - pass - - @mark_no_op - def leave_LessThan(self, original_node: "LessThan") -> None: - pass - - @mark_no_op - def leave_LessThanEqual(self, original_node: "LessThanEqual") -> None: - pass - - @mark_no_op 
- def leave_List(self, original_node: "List") -> None: - pass - - @mark_no_op - def leave_ListComp(self, original_node: "ListComp") -> None: - pass - - @mark_no_op - def leave_Match(self, original_node: "Match") -> None: - pass - - @mark_no_op - def leave_MatchAs(self, original_node: "MatchAs") -> None: - pass - - @mark_no_op - def leave_MatchCase(self, original_node: "MatchCase") -> None: - pass - - @mark_no_op - def leave_MatchClass(self, original_node: "MatchClass") -> None: - pass - - @mark_no_op - def leave_MatchKeywordElement(self, original_node: "MatchKeywordElement") -> None: - pass - - @mark_no_op - def leave_MatchList(self, original_node: "MatchList") -> None: - pass - - @mark_no_op - def leave_MatchMapping(self, original_node: "MatchMapping") -> None: - pass - - @mark_no_op - def leave_MatchMappingElement(self, original_node: "MatchMappingElement") -> None: - pass - - @mark_no_op - def leave_MatchOr(self, original_node: "MatchOr") -> None: - pass - - @mark_no_op - def leave_MatchOrElement(self, original_node: "MatchOrElement") -> None: - pass - - @mark_no_op - def leave_MatchPattern(self, original_node: "MatchPattern") -> None: - pass - - @mark_no_op - def leave_MatchSequence(self, original_node: "MatchSequence") -> None: - pass - - @mark_no_op - def leave_MatchSequenceElement(self, original_node: "MatchSequenceElement") -> None: - pass - - @mark_no_op - def leave_MatchSingleton(self, original_node: "MatchSingleton") -> None: - pass - - @mark_no_op - def leave_MatchStar(self, original_node: "MatchStar") -> None: - pass - - @mark_no_op - def leave_MatchTuple(self, original_node: "MatchTuple") -> None: - pass - - @mark_no_op - def leave_MatchValue(self, original_node: "MatchValue") -> None: - pass - - @mark_no_op - def leave_MatrixMultiply(self, original_node: "MatrixMultiply") -> None: - pass - - @mark_no_op - def leave_MatrixMultiplyAssign(self, original_node: "MatrixMultiplyAssign") -> None: - pass - - @mark_no_op - def leave_Minus(self, original_node: "Minus") -> None: - pass - - @mark_no_op - def leave_Module(self, original_node: "Module") -> None: - pass - - @mark_no_op - def leave_Modulo(self, original_node: "Modulo") -> None: - pass - - @mark_no_op - def leave_ModuloAssign(self, original_node: "ModuloAssign") -> None: - pass - - @mark_no_op - def leave_Multiply(self, original_node: "Multiply") -> None: - pass - - @mark_no_op - def leave_MultiplyAssign(self, original_node: "MultiplyAssign") -> None: - pass - - @mark_no_op - def leave_Name(self, original_node: "Name") -> None: - pass - - @mark_no_op - def leave_NameItem(self, original_node: "NameItem") -> None: - pass - - @mark_no_op - def leave_NamedExpr(self, original_node: "NamedExpr") -> None: - pass - - @mark_no_op - def leave_Newline(self, original_node: "Newline") -> None: - pass - - @mark_no_op - def leave_Nonlocal(self, original_node: "Nonlocal") -> None: - pass - - @mark_no_op - def leave_Not(self, original_node: "Not") -> None: - pass - - @mark_no_op - def leave_NotEqual(self, original_node: "NotEqual") -> None: - pass - - @mark_no_op - def leave_NotIn(self, original_node: "NotIn") -> None: - pass - - @mark_no_op - def leave_Or(self, original_node: "Or") -> None: - pass - - @mark_no_op - def leave_Param(self, original_node: "Param") -> None: - pass - - @mark_no_op - def leave_ParamSlash(self, original_node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamSpec(self, original_node: "ParamSpec") -> None: - pass - - @mark_no_op - def leave_ParamStar(self, original_node: "ParamStar") -> None: - pass - - 
@mark_no_op - def leave_Parameters(self, original_node: "Parameters") -> None: - pass - - @mark_no_op - def leave_ParenthesizedWhitespace( - self, original_node: "ParenthesizedWhitespace" - ) -> None: - pass - - @mark_no_op - def leave_Pass(self, original_node: "Pass") -> None: - pass - - @mark_no_op - def leave_Plus(self, original_node: "Plus") -> None: - pass - - @mark_no_op - def leave_Power(self, original_node: "Power") -> None: - pass - - @mark_no_op - def leave_PowerAssign(self, original_node: "PowerAssign") -> None: - pass - - @mark_no_op - def leave_Raise(self, original_node: "Raise") -> None: - pass - - @mark_no_op - def leave_Return(self, original_node: "Return") -> None: - pass - - @mark_no_op - def leave_RightCurlyBrace(self, original_node: "RightCurlyBrace") -> None: - pass - - @mark_no_op - def leave_RightParen(self, original_node: "RightParen") -> None: - pass - - @mark_no_op - def leave_RightShift(self, original_node: "RightShift") -> None: - pass - - @mark_no_op - def leave_RightShiftAssign(self, original_node: "RightShiftAssign") -> None: - pass - - @mark_no_op - def leave_RightSquareBracket(self, original_node: "RightSquareBracket") -> None: - pass - - @mark_no_op - def leave_Semicolon(self, original_node: "Semicolon") -> None: - pass - - @mark_no_op - def leave_Set(self, original_node: "Set") -> None: - pass - - @mark_no_op - def leave_SetComp(self, original_node: "SetComp") -> None: - pass - - @mark_no_op - def leave_SimpleStatementLine(self, original_node: "SimpleStatementLine") -> None: - pass - - @mark_no_op - def leave_SimpleStatementSuite(self, original_node: "SimpleStatementSuite") -> None: - pass - - @mark_no_op - def leave_SimpleString(self, original_node: "SimpleString") -> None: - pass - - @mark_no_op - def leave_SimpleWhitespace(self, original_node: "SimpleWhitespace") -> None: - pass - - @mark_no_op - def leave_Slice(self, original_node: "Slice") -> None: - pass - - @mark_no_op - def leave_StarredDictElement(self, original_node: "StarredDictElement") -> None: - pass - - @mark_no_op - def leave_StarredElement(self, original_node: "StarredElement") -> None: - pass - - @mark_no_op - def leave_Subscript(self, original_node: "Subscript") -> None: - pass - - @mark_no_op - def leave_SubscriptElement(self, original_node: "SubscriptElement") -> None: - pass - - @mark_no_op - def leave_Subtract(self, original_node: "Subtract") -> None: - pass - - @mark_no_op - def leave_SubtractAssign(self, original_node: "SubtractAssign") -> None: - pass - - @mark_no_op - def leave_TrailingWhitespace(self, original_node: "TrailingWhitespace") -> None: - pass - - @mark_no_op - def leave_Try(self, original_node: "Try") -> None: - pass - - @mark_no_op - def leave_TryStar(self, original_node: "TryStar") -> None: - pass - - @mark_no_op - def leave_Tuple(self, original_node: "Tuple") -> None: - pass - - @mark_no_op - def leave_TypeAlias(self, original_node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeParam(self, original_node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParameters(self, original_node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeVar(self, original_node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVarTuple(self, original_node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def leave_UnaryOperation(self, original_node: "UnaryOperation") -> None: - pass - - @mark_no_op - def leave_While(self, original_node: "While") -> None: - pass - - @mark_no_op - def leave_With(self, original_node: "With") -> None: - 
pass - - @mark_no_op - def leave_WithItem(self, original_node: "WithItem") -> None: - pass - - @mark_no_op - def leave_Yield(self, original_node: "Yield") -> None: - pass - - -class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): - @mark_no_op - def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_AddAssign( - self, original_node: "AddAssign", updated_node: "AddAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_And(self, original_node: "And", updated_node: "And") -> "BaseBooleanOp": - return updated_node - - @mark_no_op - def leave_AnnAssign( - self, original_node: "AnnAssign", updated_node: "AnnAssign" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Annotation( - self, original_node: "Annotation", updated_node: "Annotation" - ) -> "Annotation": - return updated_node - - @mark_no_op - def leave_Arg( - self, original_node: "Arg", updated_node: "Arg" - ) -> Union["Arg", FlattenSentinel["Arg"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_AsName(self, original_node: "AsName", updated_node: "AsName") -> "AsName": - return updated_node - - @mark_no_op - def leave_Assert( - self, original_node: "Assert", updated_node: "Assert" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Assign( - self, original_node: "Assign", updated_node: "Assign" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_AssignEqual( - self, original_node: "AssignEqual", updated_node: "AssignEqual" - ) -> Union["AssignEqual", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_AssignTarget( - self, original_node: "AssignTarget", updated_node: "AssignTarget" - ) -> Union["AssignTarget", FlattenSentinel["AssignTarget"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Asynchronous( - self, original_node: "Asynchronous", updated_node: "Asynchronous" - ) -> "Asynchronous": - return updated_node - - @mark_no_op - def leave_Attribute( - self, original_node: "Attribute", updated_node: "Attribute" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_AugAssign( - self, original_node: "AugAssign", updated_node: "AugAssign" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Await( - self, original_node: "Await", updated_node: "Await" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_BinaryOperation( - self, original_node: "BinaryOperation", updated_node: "BinaryOperation" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_BitAnd( - self, original_node: "BitAnd", updated_node: "BitAnd" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_BitAndAssign( - self, original_node: "BitAndAssign", updated_node: "BitAndAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_BitInvert( - self, original_node: "BitInvert", updated_node: "BitInvert" - ) -> "BaseUnaryOp": - return updated_node - - @mark_no_op - def leave_BitOr( - self, original_node: "BitOr", updated_node: "BitOr" - ) -> Union["BaseBinaryOp", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_BitOrAssign( - self, 
original_node: "BitOrAssign", updated_node: "BitOrAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_BitXor( - self, original_node: "BitXor", updated_node: "BitXor" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_BitXorAssign( - self, original_node: "BitXorAssign", updated_node: "BitXorAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_BooleanOperation( - self, original_node: "BooleanOperation", updated_node: "BooleanOperation" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Break( - self, original_node: "Break", updated_node: "Break" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Call( - self, original_node: "Call", updated_node: "Call" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_ClassDef( - self, original_node: "ClassDef", updated_node: "ClassDef" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Colon( - self, original_node: "Colon", updated_node: "Colon" - ) -> Union["Colon", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Comma( - self, original_node: "Comma", updated_node: "Comma" - ) -> Union["Comma", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Comment( - self, original_node: "Comment", updated_node: "Comment" - ) -> "Comment": - return updated_node - - @mark_no_op - def leave_CompFor( - self, original_node: "CompFor", updated_node: "CompFor" - ) -> "CompFor": - return updated_node - - @mark_no_op - def leave_CompIf(self, original_node: "CompIf", updated_node: "CompIf") -> "CompIf": - return updated_node - - @mark_no_op - def leave_Comparison( - self, original_node: "Comparison", updated_node: "Comparison" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_ComparisonTarget( - self, original_node: "ComparisonTarget", updated_node: "ComparisonTarget" - ) -> Union[ - "ComparisonTarget", FlattenSentinel["ComparisonTarget"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_ConcatenatedString( - self, original_node: "ConcatenatedString", updated_node: "ConcatenatedString" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Continue( - self, original_node: "Continue", updated_node: "Continue" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Decorator( - self, original_node: "Decorator", updated_node: "Decorator" - ) -> Union["Decorator", FlattenSentinel["Decorator"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Del( - self, original_node: "Del", updated_node: "Del" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Dict( - self, original_node: "Dict", updated_node: "Dict" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_DictComp( - self, original_node: "DictComp", updated_node: "DictComp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_DictElement( - self, original_node: "DictElement", updated_node: "DictElement" - ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Divide( - self, original_node: "Divide", updated_node: 
"Divide" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_DivideAssign( - self, original_node: "DivideAssign", updated_node: "DivideAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_Dot( - self, original_node: "Dot", updated_node: "Dot" - ) -> Union["Dot", FlattenSentinel["Dot"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Element( - self, original_node: "Element", updated_node: "Element" - ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Ellipsis( - self, original_node: "Ellipsis", updated_node: "Ellipsis" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Else(self, original_node: "Else", updated_node: "Else") -> "Else": - return updated_node - - @mark_no_op - def leave_EmptyLine( - self, original_node: "EmptyLine", updated_node: "EmptyLine" - ) -> Union["EmptyLine", FlattenSentinel["EmptyLine"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Equal( - self, original_node: "Equal", updated_node: "Equal" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_ExceptHandler( - self, original_node: "ExceptHandler", updated_node: "ExceptHandler" - ) -> Union["ExceptHandler", FlattenSentinel["ExceptHandler"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_ExceptStarHandler( - self, original_node: "ExceptStarHandler", updated_node: "ExceptStarHandler" - ) -> Union[ - "ExceptStarHandler", FlattenSentinel["ExceptStarHandler"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Expr( - self, original_node: "Expr", updated_node: "Expr" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Finally( - self, original_node: "Finally", updated_node: "Finally" - ) -> "Finally": - return updated_node - - @mark_no_op - def leave_Float( - self, original_node: "Float", updated_node: "Float" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_FloorDivide( - self, original_node: "FloorDivide", updated_node: "FloorDivide" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_FloorDivideAssign( - self, original_node: "FloorDivideAssign", updated_node: "FloorDivideAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_For( - self, original_node: "For", updated_node: "For" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_FormattedString( - self, original_node: "FormattedString", updated_node: "FormattedString" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_FormattedStringExpression( - self, - original_node: "FormattedStringExpression", - updated_node: "FormattedStringExpression", - ) -> Union[ - "BaseFormattedStringContent", - FlattenSentinel["BaseFormattedStringContent"], - RemovalSentinel, - ]: - return updated_node - - @mark_no_op - def leave_FormattedStringText( - self, original_node: "FormattedStringText", updated_node: "FormattedStringText" - ) -> Union[ - "BaseFormattedStringContent", - FlattenSentinel["BaseFormattedStringContent"], - RemovalSentinel, - ]: - return updated_node - - @mark_no_op - def leave_From(self, original_node: "From", updated_node: "From") -> "From": - return updated_node - - @mark_no_op - def leave_FunctionDef( - self, original_node: "FunctionDef", updated_node: 
"FunctionDef" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_GeneratorExp( - self, original_node: "GeneratorExp", updated_node: "GeneratorExp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Global( - self, original_node: "Global", updated_node: "Global" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_GreaterThan( - self, original_node: "GreaterThan", updated_node: "GreaterThan" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_GreaterThanEqual( - self, original_node: "GreaterThanEqual", updated_node: "GreaterThanEqual" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_If( - self, original_node: "If", updated_node: "If" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_IfExp( - self, original_node: "IfExp", updated_node: "IfExp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Imaginary( - self, original_node: "Imaginary", updated_node: "Imaginary" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Import( - self, original_node: "Import", updated_node: "Import" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_ImportAlias( - self, original_node: "ImportAlias", updated_node: "ImportAlias" - ) -> Union["ImportAlias", FlattenSentinel["ImportAlias"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_ImportFrom( - self, original_node: "ImportFrom", updated_node: "ImportFrom" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_ImportStar( - self, original_node: "ImportStar", updated_node: "ImportStar" - ) -> "ImportStar": - return updated_node - - @mark_no_op - def leave_In(self, original_node: "In", updated_node: "In") -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_IndentedBlock( - self, original_node: "IndentedBlock", updated_node: "IndentedBlock" - ) -> "BaseSuite": - return updated_node - - @mark_no_op - def leave_Index(self, original_node: "Index", updated_node: "Index") -> "BaseSlice": - return updated_node - - @mark_no_op - def leave_Integer( - self, original_node: "Integer", updated_node: "Integer" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Is(self, original_node: "Is", updated_node: "Is") -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_IsNot( - self, original_node: "IsNot", updated_node: "IsNot" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_Lambda( - self, original_node: "Lambda", updated_node: "Lambda" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_LeftCurlyBrace( - self, original_node: "LeftCurlyBrace", updated_node: "LeftCurlyBrace" - ) -> "LeftCurlyBrace": - return updated_node - - @mark_no_op - def leave_LeftParen( - self, original_node: "LeftParen", updated_node: "LeftParen" - ) -> Union[ - "LeftParen", MaybeSentinel, FlattenSentinel["LeftParen"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_LeftShift( - self, original_node: "LeftShift", updated_node: "LeftShift" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def 
leave_LeftShiftAssign( - self, original_node: "LeftShiftAssign", updated_node: "LeftShiftAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_LeftSquareBracket( - self, original_node: "LeftSquareBracket", updated_node: "LeftSquareBracket" - ) -> "LeftSquareBracket": - return updated_node - - @mark_no_op - def leave_LessThan( - self, original_node: "LessThan", updated_node: "LessThan" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_LessThanEqual( - self, original_node: "LessThanEqual", updated_node: "LessThanEqual" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_List( - self, original_node: "List", updated_node: "List" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_ListComp( - self, original_node: "ListComp", updated_node: "ListComp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Match( - self, original_node: "Match", updated_node: "Match" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_MatchAs( - self, original_node: "MatchAs", updated_node: "MatchAs" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchCase( - self, original_node: "MatchCase", updated_node: "MatchCase" - ) -> "MatchCase": - return updated_node - - @mark_no_op - def leave_MatchClass( - self, original_node: "MatchClass", updated_node: "MatchClass" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchKeywordElement( - self, original_node: "MatchKeywordElement", updated_node: "MatchKeywordElement" - ) -> Union[ - "MatchKeywordElement", FlattenSentinel["MatchKeywordElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_MatchList( - self, original_node: "MatchList", updated_node: "MatchList" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchMapping( - self, original_node: "MatchMapping", updated_node: "MatchMapping" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchMappingElement( - self, original_node: "MatchMappingElement", updated_node: "MatchMappingElement" - ) -> Union[ - "MatchMappingElement", FlattenSentinel["MatchMappingElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_MatchOr( - self, original_node: "MatchOr", updated_node: "MatchOr" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchOrElement( - self, original_node: "MatchOrElement", updated_node: "MatchOrElement" - ) -> Union["MatchOrElement", FlattenSentinel["MatchOrElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_MatchPattern( - self, original_node: "MatchPattern", updated_node: "MatchPattern" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchSequence( - self, original_node: "MatchSequence", updated_node: "MatchSequence" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchSequenceElement( - self, - original_node: "MatchSequenceElement", - updated_node: "MatchSequenceElement", - ) -> Union[ - "MatchSequenceElement", FlattenSentinel["MatchSequenceElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_MatchSingleton( - self, original_node: "MatchSingleton", updated_node: "MatchSingleton" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchStar( - self, original_node: "MatchStar", updated_node: "MatchStar" - ) -> "MatchStar": - return 
updated_node - - @mark_no_op - def leave_MatchTuple( - self, original_node: "MatchTuple", updated_node: "MatchTuple" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatchValue( - self, original_node: "MatchValue", updated_node: "MatchValue" - ) -> "MatchPattern": - return updated_node - - @mark_no_op - def leave_MatrixMultiply( - self, original_node: "MatrixMultiply", updated_node: "MatrixMultiply" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_MatrixMultiplyAssign( - self, - original_node: "MatrixMultiplyAssign", - updated_node: "MatrixMultiplyAssign", - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_Minus( - self, original_node: "Minus", updated_node: "Minus" - ) -> "BaseUnaryOp": - return updated_node - - @mark_no_op - def leave_Module(self, original_node: "Module", updated_node: "Module") -> "Module": - return updated_node - - @mark_no_op - def leave_Modulo( - self, original_node: "Modulo", updated_node: "Modulo" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_ModuloAssign( - self, original_node: "ModuloAssign", updated_node: "ModuloAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_Multiply( - self, original_node: "Multiply", updated_node: "Multiply" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_MultiplyAssign( - self, original_node: "MultiplyAssign", updated_node: "MultiplyAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_Name( - self, original_node: "Name", updated_node: "Name" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_NameItem( - self, original_node: "NameItem", updated_node: "NameItem" - ) -> Union["NameItem", FlattenSentinel["NameItem"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_NamedExpr( - self, original_node: "NamedExpr", updated_node: "NamedExpr" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Newline( - self, original_node: "Newline", updated_node: "Newline" - ) -> "Newline": - return updated_node - - @mark_no_op - def leave_Nonlocal( - self, original_node: "Nonlocal", updated_node: "Nonlocal" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Not(self, original_node: "Not", updated_node: "Not") -> "BaseUnaryOp": - return updated_node - - @mark_no_op - def leave_NotEqual( - self, original_node: "NotEqual", updated_node: "NotEqual" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_NotIn( - self, original_node: "NotIn", updated_node: "NotIn" - ) -> "BaseCompOp": - return updated_node - - @mark_no_op - def leave_Or(self, original_node: "Or", updated_node: "Or") -> "BaseBooleanOp": - return updated_node - - @mark_no_op - def leave_Param( - self, original_node: "Param", updated_node: "Param" - ) -> Union["Param", MaybeSentinel, FlattenSentinel["Param"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_ParamSlash( - self, original_node: "ParamSlash", updated_node: "ParamSlash" - ) -> Union["ParamSlash", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_ParamSpec( - self, original_node: "ParamSpec", updated_node: "ParamSpec" - ) -> "ParamSpec": - return updated_node - - @mark_no_op - def leave_ParamStar( - self, original_node: "ParamStar", updated_node: "ParamStar" - ) -> Union["ParamStar", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Parameters( - 
self, original_node: "Parameters", updated_node: "Parameters" - ) -> "Parameters": - return updated_node - - @mark_no_op - def leave_ParenthesizedWhitespace( - self, - original_node: "ParenthesizedWhitespace", - updated_node: "ParenthesizedWhitespace", - ) -> Union["BaseParenthesizableWhitespace", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Pass( - self, original_node: "Pass", updated_node: "Pass" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Plus(self, original_node: "Plus", updated_node: "Plus") -> "BaseUnaryOp": - return updated_node - - @mark_no_op - def leave_Power( - self, original_node: "Power", updated_node: "Power" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_PowerAssign( - self, original_node: "PowerAssign", updated_node: "PowerAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_Raise( - self, original_node: "Raise", updated_node: "Raise" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Return( - self, original_node: "Return", updated_node: "Return" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_RightCurlyBrace( - self, original_node: "RightCurlyBrace", updated_node: "RightCurlyBrace" - ) -> "RightCurlyBrace": - return updated_node - - @mark_no_op - def leave_RightParen( - self, original_node: "RightParen", updated_node: "RightParen" - ) -> Union[ - "RightParen", MaybeSentinel, FlattenSentinel["RightParen"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_RightShift( - self, original_node: "RightShift", updated_node: "RightShift" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_RightShiftAssign( - self, original_node: "RightShiftAssign", updated_node: "RightShiftAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_RightSquareBracket( - self, original_node: "RightSquareBracket", updated_node: "RightSquareBracket" - ) -> "RightSquareBracket": - return updated_node - - @mark_no_op - def leave_Semicolon( - self, original_node: "Semicolon", updated_node: "Semicolon" - ) -> Union["Semicolon", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Set(self, original_node: "Set", updated_node: "Set") -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_SetComp( - self, original_node: "SetComp", updated_node: "SetComp" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_SimpleStatementLine( - self, original_node: "SimpleStatementLine", updated_node: "SimpleStatementLine" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_SimpleStatementSuite( - self, - original_node: "SimpleStatementSuite", - updated_node: "SimpleStatementSuite", - ) -> "BaseSuite": - return updated_node - - @mark_no_op - def leave_SimpleString( - self, original_node: "SimpleString", updated_node: "SimpleString" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_SimpleWhitespace( - self, original_node: "SimpleWhitespace", updated_node: "SimpleWhitespace" - ) -> Union["BaseParenthesizableWhitespace", MaybeSentinel]: - return updated_node - - @mark_no_op - def leave_Slice(self, original_node: "Slice", updated_node: "Slice") -> 
"BaseSlice": - return updated_node - - @mark_no_op - def leave_StarredDictElement( - self, original_node: "StarredDictElement", updated_node: "StarredDictElement" - ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_StarredElement( - self, original_node: "StarredElement", updated_node: "StarredElement" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_Subscript( - self, original_node: "Subscript", updated_node: "Subscript" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_SubscriptElement( - self, original_node: "SubscriptElement", updated_node: "SubscriptElement" - ) -> Union[ - "SubscriptElement", FlattenSentinel["SubscriptElement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_Subtract( - self, original_node: "Subtract", updated_node: "Subtract" - ) -> "BaseBinaryOp": - return updated_node - - @mark_no_op - def leave_SubtractAssign( - self, original_node: "SubtractAssign", updated_node: "SubtractAssign" - ) -> "BaseAugOp": - return updated_node - - @mark_no_op - def leave_TrailingWhitespace( - self, original_node: "TrailingWhitespace", updated_node: "TrailingWhitespace" - ) -> "TrailingWhitespace": - return updated_node - - @mark_no_op - def leave_Try( - self, original_node: "Try", updated_node: "Try" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_TryStar( - self, original_node: "TryStar", updated_node: "TryStar" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Tuple( - self, original_node: "Tuple", updated_node: "Tuple" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_TypeAlias( - self, original_node: "TypeAlias", updated_node: "TypeAlias" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_TypeParam( - self, original_node: "TypeParam", updated_node: "TypeParam" - ) -> Union["TypeParam", FlattenSentinel["TypeParam"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_TypeParameters( - self, original_node: "TypeParameters", updated_node: "TypeParameters" - ) -> "TypeParameters": - return updated_node - - @mark_no_op - def leave_TypeVar( - self, original_node: "TypeVar", updated_node: "TypeVar" - ) -> "TypeVar": - return updated_node - - @mark_no_op - def leave_TypeVarTuple( - self, original_node: "TypeVarTuple", updated_node: "TypeVarTuple" - ) -> "TypeVarTuple": - return updated_node - - @mark_no_op - def leave_UnaryOperation( - self, original_node: "UnaryOperation", updated_node: "UnaryOperation" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_While( - self, original_node: "While", updated_node: "While" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_With( - self, original_node: "With", updated_node: "With" - ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_WithItem( - self, original_node: "WithItem", updated_node: "WithItem" - ) -> Union["WithItem", FlattenSentinel["WithItem"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_Yield( - self, original_node: "Yield", updated_node: "Yield" - ) -> "BaseExpression": - return 
updated_node +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + + +# This file was generated by libcst.codegen.gen_matcher_classes +from typing import Optional, TYPE_CHECKING, Union + +from libcst._flatten_sentinel import FlattenSentinel +from libcst._maybe_sentinel import MaybeSentinel +from libcst._removal_sentinel import RemovalSentinel +from libcst._typed_visitor_base import mark_no_op + + +if TYPE_CHECKING: + from libcst._nodes.expression import ( # noqa: F401 + Annotation, + Arg, + Asynchronous, + Attribute, + Await, + BaseDictElement, + BaseElement, + BaseExpression, + BaseFormattedStringContent, + BaseSlice, + BinaryOperation, + BooleanOperation, + Call, + Comparison, + ComparisonTarget, + CompFor, + CompIf, + ConcatenatedString, + Dict, + DictComp, + DictElement, + Element, + Ellipsis, + Float, + FormattedString, + FormattedStringExpression, + FormattedStringText, + From, + GeneratorExp, + IfExp, + Imaginary, + Index, + Integer, + Lambda, + LeftCurlyBrace, + LeftParen, + LeftSquareBracket, + List, + ListComp, + Name, + NamedExpr, + Param, + Parameters, + ParamSlash, + ParamStar, + RightCurlyBrace, + RightParen, + RightSquareBracket, + Set, + SetComp, + SimpleString, + Slice, + StarredDictElement, + StarredElement, + Subscript, + SubscriptElement, + Tuple, + UnaryOperation, + Yield, + ) + from libcst._nodes.module import Module # noqa: F401 + from libcst._nodes.op import ( # noqa: F401 + Add, + AddAssign, + And, + AssignEqual, + BaseAugOp, + BaseBinaryOp, + BaseBooleanOp, + BaseCompOp, + BaseUnaryOp, + BitAnd, + BitAndAssign, + BitInvert, + BitOr, + BitOrAssign, + BitXor, + BitXorAssign, + Colon, + Comma, + Divide, + DivideAssign, + Dot, + Equal, + FloorDivide, + FloorDivideAssign, + GreaterThan, + GreaterThanEqual, + ImportStar, + In, + Is, + IsNot, + LeftShift, + LeftShiftAssign, + LessThan, + LessThanEqual, + MatrixMultiply, + MatrixMultiplyAssign, + Minus, + Modulo, + ModuloAssign, + Multiply, + MultiplyAssign, + Not, + NotEqual, + NotIn, + Or, + Plus, + Power, + PowerAssign, + RightShift, + RightShiftAssign, + Semicolon, + Subtract, + SubtractAssign, + ) + from libcst._nodes.statement import ( # noqa: F401 + AnnAssign, + AsName, + Assert, + Assign, + AssignTarget, + AugAssign, + BaseSmallStatement, + BaseStatement, + BaseSuite, + Break, + ClassDef, + Continue, + Decorator, + Del, + Else, + ExceptHandler, + ExceptStarHandler, + Expr, + Finally, + For, + FunctionDef, + Global, + If, + Import, + ImportAlias, + ImportFrom, + IndentedBlock, + Match, + MatchAs, + MatchCase, + MatchClass, + MatchKeywordElement, + MatchList, + MatchMapping, + MatchMappingElement, + MatchOr, + MatchOrElement, + MatchPattern, + MatchSequence, + MatchSequenceElement, + MatchSingleton, + MatchStar, + MatchTuple, + MatchValue, + NameItem, + Nonlocal, + ParamSpec, + Pass, + Raise, + Return, + SimpleStatementLine, + SimpleStatementSuite, + Try, + TryStar, + TypeAlias, + TypeParam, + TypeParameters, + TypeVar, + TypeVarTuple, + While, + With, + WithItem, + ) + from libcst._nodes.whitespace import ( # noqa: F401 + BaseParenthesizableWhitespace, + Comment, + EmptyLine, + Newline, + ParenthesizedWhitespace, + SimpleWhitespace, + TrailingWhitespace, + ) + + +class CSTTypedBaseFunctions: + @mark_no_op + def visit_Add(self, node: "Add") -> Optional[bool]: + pass + + @mark_no_op + def visit_Add_whitespace_before(self, node: "Add") -> None: + pass + + @mark_no_op + def 
leave_Add_whitespace_before(self, node: "Add") -> None: + pass + + @mark_no_op + def visit_Add_whitespace_after(self, node: "Add") -> None: + pass + + @mark_no_op + def leave_Add_whitespace_after(self, node: "Add") -> None: + pass + + @mark_no_op + def visit_AddAssign(self, node: "AddAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_AddAssign_whitespace_before(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def leave_AddAssign_whitespace_before(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def visit_AddAssign_whitespace_after(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def leave_AddAssign_whitespace_after(self, node: "AddAssign") -> None: + pass + + @mark_no_op + def visit_And(self, node: "And") -> Optional[bool]: + pass + + @mark_no_op + def visit_And_whitespace_before(self, node: "And") -> None: + pass + + @mark_no_op + def leave_And_whitespace_before(self, node: "And") -> None: + pass + + @mark_no_op + def visit_And_whitespace_after(self, node: "And") -> None: + pass + + @mark_no_op + def leave_And_whitespace_after(self, node: "And") -> None: + pass + + @mark_no_op + def visit_AnnAssign(self, node: "AnnAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_AnnAssign_target(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_target(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_annotation(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_annotation(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_value(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_value(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_equal(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_equal(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_AnnAssign_semicolon(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_AnnAssign_semicolon(self, node: "AnnAssign") -> None: + pass + + @mark_no_op + def visit_Annotation(self, node: "Annotation") -> Optional[bool]: + pass + + @mark_no_op + def visit_Annotation_annotation(self, node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Annotation_annotation(self, node: "Annotation") -> None: + pass + + @mark_no_op + def visit_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Annotation_whitespace_before_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def visit_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Annotation_whitespace_after_indicator(self, node: "Annotation") -> None: + pass + + @mark_no_op + def visit_Arg(self, node: "Arg") -> Optional[bool]: + pass + + @mark_no_op + def visit_Arg_value(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_value(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_keyword(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_keyword(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_equal(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_equal(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_comma(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_comma(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def 
leave_Arg_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_whitespace_after_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_whitespace_after_star(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_Arg_whitespace_after_arg(self, node: "Arg") -> None: + pass + + @mark_no_op + def leave_Arg_whitespace_after_arg(self, node: "Arg") -> None: + pass + + @mark_no_op + def visit_AsName(self, node: "AsName") -> Optional[bool]: + pass + + @mark_no_op + def visit_AsName_name(self, node: "AsName") -> None: + pass + + @mark_no_op + def leave_AsName_name(self, node: "AsName") -> None: + pass + + @mark_no_op + def visit_AsName_whitespace_before_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def leave_AsName_whitespace_before_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def visit_AsName_whitespace_after_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def leave_AsName_whitespace_after_as(self, node: "AsName") -> None: + pass + + @mark_no_op + def visit_Assert(self, node: "Assert") -> Optional[bool]: + pass + + @mark_no_op + def visit_Assert_test(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_test(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_msg(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_msg(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_comma(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_comma(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_whitespace_after_assert(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_whitespace_after_assert(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assert_semicolon(self, node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assert_semicolon(self, node: "Assert") -> None: + pass + + @mark_no_op + def visit_Assign(self, node: "Assign") -> Optional[bool]: + pass + + @mark_no_op + def visit_Assign_targets(self, node: "Assign") -> None: + pass + + @mark_no_op + def leave_Assign_targets(self, node: "Assign") -> None: + pass + + @mark_no_op + def visit_Assign_value(self, node: "Assign") -> None: + pass + + @mark_no_op + def leave_Assign_value(self, node: "Assign") -> None: + pass + + @mark_no_op + def visit_Assign_semicolon(self, node: "Assign") -> None: + pass + + @mark_no_op + def leave_Assign_semicolon(self, node: "Assign") -> None: + pass + + @mark_no_op + def visit_AssignEqual(self, node: "AssignEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def leave_AssignEqual_whitespace_before(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def visit_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def leave_AssignEqual_whitespace_after(self, node: "AssignEqual") -> None: + pass + + @mark_no_op + def visit_AssignTarget(self, node: "AssignTarget") -> Optional[bool]: + pass + + @mark_no_op + def visit_AssignTarget_target(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_AssignTarget_target(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def visit_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_AssignTarget_whitespace_before_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def 
visit_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_AssignTarget_whitespace_after_equal(self, node: "AssignTarget") -> None: + pass + + @mark_no_op + def visit_Asynchronous(self, node: "Asynchronous") -> Optional[bool]: + pass + + @mark_no_op + def visit_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: + pass + + @mark_no_op + def leave_Asynchronous_whitespace_after(self, node: "Asynchronous") -> None: + pass + + @mark_no_op + def visit_Attribute(self, node: "Attribute") -> Optional[bool]: + pass + + @mark_no_op + def visit_Attribute_value(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_value(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_attr(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_attr(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_dot(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_dot(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_lpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_lpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_Attribute_rpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def leave_Attribute_rpar(self, node: "Attribute") -> None: + pass + + @mark_no_op + def visit_AugAssign(self, node: "AugAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_AugAssign_target(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_target(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_AugAssign_operator(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_operator(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_AugAssign_value(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_value(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_AugAssign_semicolon(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def leave_AugAssign_semicolon(self, node: "AugAssign") -> None: + pass + + @mark_no_op + def visit_Await(self, node: "Await") -> Optional[bool]: + pass + + @mark_no_op + def visit_Await_expression(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_expression(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_Await_lpar(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_lpar(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_Await_rpar(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_rpar(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_Await_whitespace_after_await(self, node: "Await") -> None: + pass + + @mark_no_op + def leave_Await_whitespace_after_await(self, node: "Await") -> None: + pass + + @mark_no_op + def visit_BinaryOperation(self, node: "BinaryOperation") -> Optional[bool]: + pass + + @mark_no_op + def visit_BinaryOperation_left(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_left(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_operator(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_operator(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_right(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + 
def leave_BinaryOperation_right(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_lpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BinaryOperation_rpar(self, node: "BinaryOperation") -> None: + pass + + @mark_no_op + def visit_BitAnd(self, node: "BitAnd") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitAnd_whitespace_before(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def leave_BitAnd_whitespace_before(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def visit_BitAnd_whitespace_after(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def leave_BitAnd_whitespace_after(self, node: "BitAnd") -> None: + pass + + @mark_no_op + def visit_BitAndAssign(self, node: "BitAndAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def leave_BitAndAssign_whitespace_before(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def visit_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def leave_BitAndAssign_whitespace_after(self, node: "BitAndAssign") -> None: + pass + + @mark_no_op + def visit_BitInvert(self, node: "BitInvert") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitInvert_whitespace_after(self, node: "BitInvert") -> None: + pass + + @mark_no_op + def leave_BitInvert_whitespace_after(self, node: "BitInvert") -> None: + pass + + @mark_no_op + def visit_BitOr(self, node: "BitOr") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitOr_whitespace_before(self, node: "BitOr") -> None: + pass + + @mark_no_op + def leave_BitOr_whitespace_before(self, node: "BitOr") -> None: + pass + + @mark_no_op + def visit_BitOr_whitespace_after(self, node: "BitOr") -> None: + pass + + @mark_no_op + def leave_BitOr_whitespace_after(self, node: "BitOr") -> None: + pass + + @mark_no_op + def visit_BitOrAssign(self, node: "BitOrAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def leave_BitOrAssign_whitespace_before(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def visit_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def leave_BitOrAssign_whitespace_after(self, node: "BitOrAssign") -> None: + pass + + @mark_no_op + def visit_BitXor(self, node: "BitXor") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitXor_whitespace_before(self, node: "BitXor") -> None: + pass + + @mark_no_op + def leave_BitXor_whitespace_before(self, node: "BitXor") -> None: + pass + + @mark_no_op + def visit_BitXor_whitespace_after(self, node: "BitXor") -> None: + pass + + @mark_no_op + def leave_BitXor_whitespace_after(self, node: "BitXor") -> None: + pass + + @mark_no_op + def visit_BitXorAssign(self, node: "BitXorAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def leave_BitXorAssign_whitespace_before(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def visit_BitXorAssign_whitespace_after(self, node: "BitXorAssign") -> None: + pass + + @mark_no_op + def leave_BitXorAssign_whitespace_after(self, node: 
"BitXorAssign") -> None: + pass + + @mark_no_op + def visit_BooleanOperation(self, node: "BooleanOperation") -> Optional[bool]: + pass + + @mark_no_op + def visit_BooleanOperation_left(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_left(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_operator(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_operator(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_right(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_right(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_lpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_BooleanOperation_rpar(self, node: "BooleanOperation") -> None: + pass + + @mark_no_op + def visit_Break(self, node: "Break") -> Optional[bool]: + pass + + @mark_no_op + def visit_Break_semicolon(self, node: "Break") -> None: + pass + + @mark_no_op + def leave_Break_semicolon(self, node: "Break") -> None: + pass + + @mark_no_op + def visit_Call(self, node: "Call") -> Optional[bool]: + pass + + @mark_no_op + def visit_Call_func(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_func(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_args(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_args(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_lpar(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_lpar(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_rpar(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_rpar(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_whitespace_after_func(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_whitespace_after_func(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_Call_whitespace_before_args(self, node: "Call") -> None: + pass + + @mark_no_op + def leave_Call_whitespace_before_args(self, node: "Call") -> None: + pass + + @mark_no_op + def visit_ClassDef(self, node: "ClassDef") -> Optional[bool]: + pass + + @mark_no_op + def visit_ClassDef_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_body(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_body(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_bases(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_bases(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_keywords(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_keywords(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_lpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_lpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_rpar(self, node: 
"ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_rpar(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_leading_lines(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_leading_lines(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_lines_after_decorators(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_after_class(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_after_name(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: + pass + + @mark_no_op + def visit_Colon(self, node: "Colon") -> Optional[bool]: + pass + + @mark_no_op + def visit_Colon_whitespace_before(self, node: "Colon") -> None: + pass + + @mark_no_op + def leave_Colon_whitespace_before(self, node: "Colon") -> None: + pass + + @mark_no_op + def visit_Colon_whitespace_after(self, node: "Colon") -> None: + pass + + @mark_no_op + def leave_Colon_whitespace_after(self, node: "Colon") -> None: + pass + + @mark_no_op + def visit_Comma(self, node: "Comma") -> Optional[bool]: + pass + + @mark_no_op + def visit_Comma_whitespace_before(self, node: "Comma") -> None: + pass + + @mark_no_op + def leave_Comma_whitespace_before(self, node: "Comma") -> None: + pass + + @mark_no_op + def visit_Comma_whitespace_after(self, node: "Comma") -> None: + pass + + @mark_no_op + def leave_Comma_whitespace_after(self, node: "Comma") -> None: + pass + + @mark_no_op + def visit_Comment(self, node: "Comment") -> Optional[bool]: + pass + + @mark_no_op + def visit_Comment_value(self, node: "Comment") -> None: + pass + + @mark_no_op + def leave_Comment_value(self, node: "Comment") -> None: + pass + + @mark_no_op + def visit_CompFor(self, node: "CompFor") -> Optional[bool]: + pass + + @mark_no_op + def visit_CompFor_target(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_target(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_iter(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_iter(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_ifs(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_ifs(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_inner_for_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_inner_for_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_asynchronous(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_asynchronous(self, node: "CompFor") -> None: + pass + + @mark_no_op + 
def visit_CompFor_whitespace_before(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_before(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_after_for(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_after_for(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_before_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_before_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompFor_whitespace_after_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompFor_whitespace_after_in(self, node: "CompFor") -> None: + pass + + @mark_no_op + def visit_CompIf(self, node: "CompIf") -> Optional[bool]: + pass + + @mark_no_op + def visit_CompIf_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def leave_CompIf_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def visit_CompIf_whitespace_before(self, node: "CompIf") -> None: + pass + + @mark_no_op + def leave_CompIf_whitespace_before(self, node: "CompIf") -> None: + pass + + @mark_no_op + def visit_CompIf_whitespace_before_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def leave_CompIf_whitespace_before_test(self, node: "CompIf") -> None: + pass + + @mark_no_op + def visit_Comparison(self, node: "Comparison") -> Optional[bool]: + pass + + @mark_no_op + def visit_Comparison_left(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_left(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_Comparison_comparisons(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_comparisons(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_Comparison_lpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_lpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_Comparison_rpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def leave_Comparison_rpar(self, node: "Comparison") -> None: + pass + + @mark_no_op + def visit_ComparisonTarget(self, node: "ComparisonTarget") -> Optional[bool]: + pass + + @mark_no_op + def visit_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def leave_ComparisonTarget_operator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def visit_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def leave_ComparisonTarget_comparator(self, node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString(self, node: "ConcatenatedString") -> Optional[bool]: + pass + + @mark_no_op + def visit_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_left(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_right(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_lpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_rpar(self, node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_rpar(self, node: 
"ConcatenatedString") -> None: + pass + + @mark_no_op + def visit_ConcatenatedString_whitespace_between( + self, node: "ConcatenatedString" + ) -> None: + pass + + @mark_no_op + def leave_ConcatenatedString_whitespace_between( + self, node: "ConcatenatedString" + ) -> None: + pass + + @mark_no_op + def visit_Continue(self, node: "Continue") -> Optional[bool]: + pass + + @mark_no_op + def visit_Continue_semicolon(self, node: "Continue") -> None: + pass + + @mark_no_op + def leave_Continue_semicolon(self, node: "Continue") -> None: + pass + + @mark_no_op + def visit_Decorator(self, node: "Decorator") -> Optional[bool]: + pass + + @mark_no_op + def visit_Decorator_decorator(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_decorator(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Decorator_leading_lines(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_leading_lines(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Decorator_whitespace_after_at(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_whitespace_after_at(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Decorator_trailing_whitespace(self, node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Decorator_trailing_whitespace(self, node: "Decorator") -> None: + pass + + @mark_no_op + def visit_Del(self, node: "Del") -> Optional[bool]: + pass + + @mark_no_op + def visit_Del_target(self, node: "Del") -> None: + pass + + @mark_no_op + def leave_Del_target(self, node: "Del") -> None: + pass + + @mark_no_op + def visit_Del_whitespace_after_del(self, node: "Del") -> None: + pass + + @mark_no_op + def leave_Del_whitespace_after_del(self, node: "Del") -> None: + pass + + @mark_no_op + def visit_Del_semicolon(self, node: "Del") -> None: + pass + + @mark_no_op + def leave_Del_semicolon(self, node: "Del") -> None: + pass + + @mark_no_op + def visit_Dict(self, node: "Dict") -> Optional[bool]: + pass + + @mark_no_op + def visit_Dict_elements(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_elements(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_lbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_lbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_rbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_rbrace(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_lpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_lpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_Dict_rpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def leave_Dict_rpar(self, node: "Dict") -> None: + pass + + @mark_no_op + def visit_DictComp(self, node: "DictComp") -> Optional[bool]: + pass + + @mark_no_op + def visit_DictComp_key(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_key(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_value(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_value(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_for_in(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_for_in(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_lbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_lbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def 
visit_DictComp_rbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_rbrace(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_lpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_lpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_rpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_rpar(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_whitespace_before_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_whitespace_before_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictComp_whitespace_after_colon(self, node: "DictComp") -> None: + pass + + @mark_no_op + def visit_DictElement(self, node: "DictElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_DictElement_key(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_key(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_value(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_value(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_comma(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_comma(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_whitespace_before_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def leave_DictElement_whitespace_after_colon(self, node: "DictElement") -> None: + pass + + @mark_no_op + def visit_Divide(self, node: "Divide") -> Optional[bool]: + pass + + @mark_no_op + def visit_Divide_whitespace_before(self, node: "Divide") -> None: + pass + + @mark_no_op + def leave_Divide_whitespace_before(self, node: "Divide") -> None: + pass + + @mark_no_op + def visit_Divide_whitespace_after(self, node: "Divide") -> None: + pass + + @mark_no_op + def leave_Divide_whitespace_after(self, node: "Divide") -> None: + pass + + @mark_no_op + def visit_DivideAssign(self, node: "DivideAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def leave_DivideAssign_whitespace_before(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def visit_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def leave_DivideAssign_whitespace_after(self, node: "DivideAssign") -> None: + pass + + @mark_no_op + def visit_Dot(self, node: "Dot") -> Optional[bool]: + pass + + @mark_no_op + def visit_Dot_whitespace_before(self, node: "Dot") -> None: + pass + + @mark_no_op + def leave_Dot_whitespace_before(self, node: "Dot") -> None: + pass + + @mark_no_op + def visit_Dot_whitespace_after(self, node: "Dot") -> None: + pass + + @mark_no_op + def leave_Dot_whitespace_after(self, node: "Dot") -> None: + pass + + @mark_no_op + def visit_Element(self, node: "Element") -> Optional[bool]: + pass + + @mark_no_op + def visit_Element_value(self, node: "Element") -> None: + pass + + @mark_no_op + def leave_Element_value(self, node: "Element") -> None: + pass + + @mark_no_op + 
def visit_Element_comma(self, node: "Element") -> None: + pass + + @mark_no_op + def leave_Element_comma(self, node: "Element") -> None: + pass + + @mark_no_op + def visit_Ellipsis(self, node: "Ellipsis") -> Optional[bool]: + pass + + @mark_no_op + def visit_Ellipsis_lpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def leave_Ellipsis_lpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def visit_Ellipsis_rpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def leave_Ellipsis_rpar(self, node: "Ellipsis") -> None: + pass + + @mark_no_op + def visit_Else(self, node: "Else") -> Optional[bool]: + pass + + @mark_no_op + def visit_Else_body(self, node: "Else") -> None: + pass + + @mark_no_op + def leave_Else_body(self, node: "Else") -> None: + pass + + @mark_no_op + def visit_Else_leading_lines(self, node: "Else") -> None: + pass + + @mark_no_op + def leave_Else_leading_lines(self, node: "Else") -> None: + pass + + @mark_no_op + def visit_Else_whitespace_before_colon(self, node: "Else") -> None: + pass + + @mark_no_op + def leave_Else_whitespace_before_colon(self, node: "Else") -> None: + pass + + @mark_no_op + def visit_EmptyLine(self, node: "EmptyLine") -> Optional[bool]: + pass + + @mark_no_op + def visit_EmptyLine_indent(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_indent(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_EmptyLine_whitespace(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_whitespace(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_EmptyLine_comment(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_comment(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_EmptyLine_newline(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_EmptyLine_newline(self, node: "EmptyLine") -> None: + pass + + @mark_no_op + def visit_Equal(self, node: "Equal") -> Optional[bool]: + pass + + @mark_no_op + def visit_Equal_whitespace_before(self, node: "Equal") -> None: + pass + + @mark_no_op + def leave_Equal_whitespace_before(self, node: "Equal") -> None: + pass + + @mark_no_op + def visit_Equal_whitespace_after(self, node: "Equal") -> None: + pass + + @mark_no_op + def leave_Equal_whitespace_after(self, node: "Equal") -> None: + pass + + @mark_no_op + def visit_ExceptHandler(self, node: "ExceptHandler") -> Optional[bool]: + pass + + @mark_no_op + def visit_ExceptHandler_body(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_body(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_type(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_type(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_name(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_name(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptHandler_leading_lines(self, node: "ExceptHandler") -> None: + pass + + @mark_no_op + def visit_ExceptHandler_whitespace_after_except( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptHandler_whitespace_after_except( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptHandler_whitespace_before_colon( + self, node: "ExceptHandler" + ) -> 
None: + pass + + @mark_no_op + def leave_ExceptHandler_whitespace_before_colon( + self, node: "ExceptHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler(self, node: "ExceptStarHandler") -> Optional[bool]: + pass + + @mark_no_op + def visit_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_body(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_type(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_name(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_leading_lines(self, node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_after_except( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_after_except( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_after_star( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_after_star( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_ExceptStarHandler_whitespace_before_colon( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler_whitespace_before_colon( + self, node: "ExceptStarHandler" + ) -> None: + pass + + @mark_no_op + def visit_Expr(self, node: "Expr") -> Optional[bool]: + pass + + @mark_no_op + def visit_Expr_value(self, node: "Expr") -> None: + pass + + @mark_no_op + def leave_Expr_value(self, node: "Expr") -> None: + pass + + @mark_no_op + def visit_Expr_semicolon(self, node: "Expr") -> None: + pass + + @mark_no_op + def leave_Expr_semicolon(self, node: "Expr") -> None: + pass + + @mark_no_op + def visit_Finally(self, node: "Finally") -> Optional[bool]: + pass + + @mark_no_op + def visit_Finally_body(self, node: "Finally") -> None: + pass + + @mark_no_op + def leave_Finally_body(self, node: "Finally") -> None: + pass + + @mark_no_op + def visit_Finally_leading_lines(self, node: "Finally") -> None: + pass + + @mark_no_op + def leave_Finally_leading_lines(self, node: "Finally") -> None: + pass + + @mark_no_op + def visit_Finally_whitespace_before_colon(self, node: "Finally") -> None: + pass + + @mark_no_op + def leave_Finally_whitespace_before_colon(self, node: "Finally") -> None: + pass + + @mark_no_op + def visit_Float(self, node: "Float") -> Optional[bool]: + pass + + @mark_no_op + def visit_Float_value(self, node: "Float") -> None: + pass + + @mark_no_op + def leave_Float_value(self, node: "Float") -> None: + pass + + @mark_no_op + def visit_Float_lpar(self, node: "Float") -> None: + pass + + @mark_no_op + def leave_Float_lpar(self, node: "Float") -> None: + pass + + @mark_no_op + def visit_Float_rpar(self, node: "Float") -> None: + pass + + @mark_no_op + def leave_Float_rpar(self, node: "Float") -> None: + pass + + @mark_no_op + def visit_FloorDivide(self, node: "FloorDivide") -> Optional[bool]: + pass + + @mark_no_op + def visit_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: + pass + + 
@mark_no_op + def leave_FloorDivide_whitespace_before(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def visit_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def leave_FloorDivide_whitespace_after(self, node: "FloorDivide") -> None: + pass + + @mark_no_op + def visit_FloorDivideAssign(self, node: "FloorDivideAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_FloorDivideAssign_whitespace_before( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def leave_FloorDivideAssign_whitespace_before( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def visit_FloorDivideAssign_whitespace_after( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def leave_FloorDivideAssign_whitespace_after( + self, node: "FloorDivideAssign" + ) -> None: + pass + + @mark_no_op + def visit_For(self, node: "For") -> Optional[bool]: + pass + + @mark_no_op + def visit_For_target(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_target(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_iter(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_iter(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_body(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_body(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_orelse(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_orelse(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_asynchronous(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_asynchronous(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_leading_lines(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_leading_lines(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_after_for(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_after_for(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_before_in(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_before_in(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_after_in(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_after_in(self, node: "For") -> None: + pass + + @mark_no_op + def visit_For_whitespace_before_colon(self, node: "For") -> None: + pass + + @mark_no_op + def leave_For_whitespace_before_colon(self, node: "For") -> None: + pass + + @mark_no_op + def visit_FormattedString(self, node: "FormattedString") -> Optional[bool]: + pass + + @mark_no_op + def visit_FormattedString_parts(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_parts(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_start(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_start(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_end(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_end(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_lpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedString_lpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedString_rpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + 
def leave_FormattedString_rpar(self, node: "FormattedString") -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression( + self, node: "FormattedStringExpression" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_FormattedStringExpression_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_conversion( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_conversion( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_format_spec( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_format_spec( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_whitespace_before_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_whitespace_before_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_whitespace_after_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_whitespace_after_expression( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringExpression_equal( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression_equal( + self, node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_FormattedStringText(self, node: "FormattedStringText") -> Optional[bool]: + pass + + @mark_no_op + def visit_FormattedStringText_value(self, node: "FormattedStringText") -> None: + pass + + @mark_no_op + def leave_FormattedStringText_value(self, node: "FormattedStringText") -> None: + pass + + @mark_no_op + def visit_From(self, node: "From") -> Optional[bool]: + pass + + @mark_no_op + def visit_From_item(self, node: "From") -> None: + pass + + @mark_no_op + def leave_From_item(self, node: "From") -> None: + pass + + @mark_no_op + def visit_From_whitespace_before_from(self, node: "From") -> None: + pass + + @mark_no_op + def leave_From_whitespace_before_from(self, node: "From") -> None: + pass + + @mark_no_op + def visit_From_whitespace_after_from(self, node: "From") -> None: + pass + + @mark_no_op + def leave_From_whitespace_after_from(self, node: "From") -> None: + pass + + @mark_no_op + def visit_FunctionDef(self, node: "FunctionDef") -> Optional[bool]: + pass + + @mark_no_op + def visit_FunctionDef_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_body(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_body(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def 
visit_FunctionDef_returns(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_returns(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_asynchronous(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_leading_lines(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_lines_after_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_lines_after_decorators(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_after_def(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_after_name(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_before_params(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: + pass + + @mark_no_op + def visit_FunctionDef_whitespace_after_type_parameters( + self, node: "FunctionDef" + ) -> None: + pass + + @mark_no_op + def leave_FunctionDef_whitespace_after_type_parameters( + self, node: "FunctionDef" + ) -> None: + pass + + @mark_no_op + def visit_GeneratorExp(self, node: "GeneratorExp") -> Optional[bool]: + pass + + @mark_no_op + def visit_GeneratorExp_elt(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_elt(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_for_in(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_lpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_GeneratorExp_rpar(self, node: "GeneratorExp") -> None: + pass + + @mark_no_op + def visit_Global(self, node: "Global") -> Optional[bool]: + pass + + @mark_no_op + def visit_Global_names(self, node: "Global") -> None: + pass + + @mark_no_op + def leave_Global_names(self, node: "Global") -> None: + pass + + @mark_no_op + def visit_Global_whitespace_after_global(self, node: "Global") -> None: + pass + + @mark_no_op + def leave_Global_whitespace_after_global(self, node: "Global") -> None: + pass + + @mark_no_op + def visit_Global_semicolon(self, node: "Global") -> None: + pass + + @mark_no_op + def leave_Global_semicolon(self, node: "Global") -> None: + pass + + @mark_no_op + def visit_GreaterThan(self, node: "GreaterThan") -> Optional[bool]: + 
pass + + @mark_no_op + def visit_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def leave_GreaterThan_whitespace_before(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def visit_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def leave_GreaterThan_whitespace_after(self, node: "GreaterThan") -> None: + pass + + @mark_no_op + def visit_GreaterThanEqual(self, node: "GreaterThanEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_GreaterThanEqual_whitespace_before( + self, node: "GreaterThanEqual" + ) -> None: + pass + + @mark_no_op + def leave_GreaterThanEqual_whitespace_before( + self, node: "GreaterThanEqual" + ) -> None: + pass + + @mark_no_op + def visit_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: + pass + + @mark_no_op + def leave_GreaterThanEqual_whitespace_after(self, node: "GreaterThanEqual") -> None: + pass + + @mark_no_op + def visit_If(self, node: "If") -> Optional[bool]: + pass + + @mark_no_op + def visit_If_test(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_test(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_body(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_body(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_orelse(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_orelse(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_leading_lines(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_leading_lines(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_whitespace_before_test(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_whitespace_before_test(self, node: "If") -> None: + pass + + @mark_no_op + def visit_If_whitespace_after_test(self, node: "If") -> None: + pass + + @mark_no_op + def leave_If_whitespace_after_test(self, node: "If") -> None: + pass + + @mark_no_op + def visit_IfExp(self, node: "IfExp") -> Optional[bool]: + pass + + @mark_no_op + def visit_IfExp_test(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_test(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_body(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_body(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_orelse(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_orelse(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_lpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_lpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_rpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_rpar(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_before_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_before_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_after_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_after_if(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_before_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_before_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def visit_IfExp_whitespace_after_else(self, node: "IfExp") -> None: + pass + + @mark_no_op + def leave_IfExp_whitespace_after_else(self, node: "IfExp") -> None: + 
pass + + @mark_no_op + def visit_Imaginary(self, node: "Imaginary") -> Optional[bool]: + pass + + @mark_no_op + def visit_Imaginary_value(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Imaginary_value(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def visit_Imaginary_lpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Imaginary_lpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def visit_Imaginary_rpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Imaginary_rpar(self, node: "Imaginary") -> None: + pass + + @mark_no_op + def visit_Import(self, node: "Import") -> Optional[bool]: + pass + + @mark_no_op + def visit_Import_names(self, node: "Import") -> None: + pass + + @mark_no_op + def leave_Import_names(self, node: "Import") -> None: + pass + + @mark_no_op + def visit_Import_semicolon(self, node: "Import") -> None: + pass + + @mark_no_op + def leave_Import_semicolon(self, node: "Import") -> None: + pass + + @mark_no_op + def visit_Import_whitespace_after_import(self, node: "Import") -> None: + pass + + @mark_no_op + def leave_Import_whitespace_after_import(self, node: "Import") -> None: + pass + + @mark_no_op + def visit_ImportAlias(self, node: "ImportAlias") -> Optional[bool]: + pass + + @mark_no_op + def visit_ImportAlias_name(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportAlias_name(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def visit_ImportAlias_asname(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportAlias_asname(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def visit_ImportAlias_comma(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportAlias_comma(self, node: "ImportAlias") -> None: + pass + + @mark_no_op + def visit_ImportFrom(self, node: "ImportFrom") -> Optional[bool]: + pass + + @mark_no_op + def visit_ImportFrom_module(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_module(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_names(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_names(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_relative(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_relative(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_lpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_lpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_rpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_rpar(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_semicolon(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_semicolon(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_whitespace_after_from(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportFrom_whitespace_before_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def 
leave_ImportFrom_whitespace_after_import(self, node: "ImportFrom") -> None: + pass + + @mark_no_op + def visit_ImportStar(self, node: "ImportStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_In(self, node: "In") -> Optional[bool]: + pass + + @mark_no_op + def visit_In_whitespace_before(self, node: "In") -> None: + pass + + @mark_no_op + def leave_In_whitespace_before(self, node: "In") -> None: + pass + + @mark_no_op + def visit_In_whitespace_after(self, node: "In") -> None: + pass + + @mark_no_op + def leave_In_whitespace_after(self, node: "In") -> None: + pass + + @mark_no_op + def visit_IndentedBlock(self, node: "IndentedBlock") -> Optional[bool]: + pass + + @mark_no_op + def visit_IndentedBlock_body(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_body(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_IndentedBlock_header(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_header(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_IndentedBlock_indent(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_indent(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_IndentedBlock_footer(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_IndentedBlock_footer(self, node: "IndentedBlock") -> None: + pass + + @mark_no_op + def visit_Index(self, node: "Index") -> Optional[bool]: + pass + + @mark_no_op + def visit_Index_value(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_value(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Index_star(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_star(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Index_whitespace_after_star(self, node: "Index") -> None: + pass + + @mark_no_op + def leave_Index_whitespace_after_star(self, node: "Index") -> None: + pass + + @mark_no_op + def visit_Integer(self, node: "Integer") -> Optional[bool]: + pass + + @mark_no_op + def visit_Integer_value(self, node: "Integer") -> None: + pass + + @mark_no_op + def leave_Integer_value(self, node: "Integer") -> None: + pass + + @mark_no_op + def visit_Integer_lpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def leave_Integer_lpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def visit_Integer_rpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def leave_Integer_rpar(self, node: "Integer") -> None: + pass + + @mark_no_op + def visit_Is(self, node: "Is") -> Optional[bool]: + pass + + @mark_no_op + def visit_Is_whitespace_before(self, node: "Is") -> None: + pass + + @mark_no_op + def leave_Is_whitespace_before(self, node: "Is") -> None: + pass + + @mark_no_op + def visit_Is_whitespace_after(self, node: "Is") -> None: + pass + + @mark_no_op + def leave_Is_whitespace_after(self, node: "Is") -> None: + pass + + @mark_no_op + def visit_IsNot(self, node: "IsNot") -> Optional[bool]: + pass + + @mark_no_op + def visit_IsNot_whitespace_before(self, node: "IsNot") -> None: + pass + + @mark_no_op + def leave_IsNot_whitespace_before(self, node: "IsNot") -> None: + pass + + @mark_no_op + def visit_IsNot_whitespace_between(self, node: "IsNot") -> None: + pass + + @mark_no_op + def leave_IsNot_whitespace_between(self, node: "IsNot") -> None: + pass + + @mark_no_op + def visit_IsNot_whitespace_after(self, node: "IsNot") -> None: + pass + + @mark_no_op + def leave_IsNot_whitespace_after(self, 
node: "IsNot") -> None: + pass + + @mark_no_op + def visit_Lambda(self, node: "Lambda") -> Optional[bool]: + pass + + @mark_no_op + def visit_Lambda_params(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_params(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_body(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_body(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_colon(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_colon(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_lpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_lpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_rpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_rpar(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_Lambda_whitespace_after_lambda(self, node: "Lambda") -> None: + pass + + @mark_no_op + def leave_Lambda_whitespace_after_lambda(self, node: "Lambda") -> None: + pass + + @mark_no_op + def visit_LeftCurlyBrace(self, node: "LeftCurlyBrace") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: + pass + + @mark_no_op + def leave_LeftCurlyBrace_whitespace_after(self, node: "LeftCurlyBrace") -> None: + pass + + @mark_no_op + def visit_LeftParen(self, node: "LeftParen") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftParen_whitespace_after(self, node: "LeftParen") -> None: + pass + + @mark_no_op + def leave_LeftParen_whitespace_after(self, node: "LeftParen") -> None: + pass + + @mark_no_op + def visit_LeftShift(self, node: "LeftShift") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftShift_whitespace_before(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def leave_LeftShift_whitespace_before(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def visit_LeftShift_whitespace_after(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def leave_LeftShift_whitespace_after(self, node: "LeftShift") -> None: + pass + + @mark_no_op + def visit_LeftShiftAssign(self, node: "LeftShiftAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def leave_LeftShiftAssign_whitespace_before(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def visit_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def leave_LeftShiftAssign_whitespace_after(self, node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def visit_LeftSquareBracket(self, node: "LeftSquareBracket") -> Optional[bool]: + pass + + @mark_no_op + def visit_LeftSquareBracket_whitespace_after( + self, node: "LeftSquareBracket" + ) -> None: + pass + + @mark_no_op + def leave_LeftSquareBracket_whitespace_after( + self, node: "LeftSquareBracket" + ) -> None: + pass + + @mark_no_op + def visit_LessThan(self, node: "LessThan") -> Optional[bool]: + pass + + @mark_no_op + def visit_LessThan_whitespace_before(self, node: "LessThan") -> None: + pass + + @mark_no_op + def leave_LessThan_whitespace_before(self, node: "LessThan") -> None: + pass + + @mark_no_op + def visit_LessThan_whitespace_after(self, node: "LessThan") -> None: + pass + + @mark_no_op + def leave_LessThan_whitespace_after(self, node: "LessThan") -> None: + pass + + @mark_no_op + def visit_LessThanEqual(self, 
node: "LessThanEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def leave_LessThanEqual_whitespace_before(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def visit_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def leave_LessThanEqual_whitespace_after(self, node: "LessThanEqual") -> None: + pass + + @mark_no_op + def visit_List(self, node: "List") -> Optional[bool]: + pass + + @mark_no_op + def visit_List_elements(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_elements(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_lbracket(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_lbracket(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_rbracket(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_rbracket(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_lpar(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_lpar(self, node: "List") -> None: + pass + + @mark_no_op + def visit_List_rpar(self, node: "List") -> None: + pass + + @mark_no_op + def leave_List_rpar(self, node: "List") -> None: + pass + + @mark_no_op + def visit_ListComp(self, node: "ListComp") -> Optional[bool]: + pass + + @mark_no_op + def visit_ListComp_elt(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_elt(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_for_in(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_for_in(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_lbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_lbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_rbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_rbracket(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_lpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_lpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_ListComp_rpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def leave_ListComp_rpar(self, node: "ListComp") -> None: + pass + + @mark_no_op + def visit_Match(self, node: "Match") -> Optional[bool]: + pass + + @mark_no_op + def visit_Match_subject(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_subject(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_cases(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_cases(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_leading_lines(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_leading_lines(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_after_match(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_after_match(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_before_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_before_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_whitespace_after_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_whitespace_after_colon(self, node: "Match") -> None: + pass + + @mark_no_op + def 
visit_Match_indent(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_indent(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_Match_footer(self, node: "Match") -> None: + pass + + @mark_no_op + def leave_Match_footer(self, node: "Match") -> None: + pass + + @mark_no_op + def visit_MatchAs(self, node: "MatchAs") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchAs_pattern(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_pattern(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_name(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_name(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_whitespace_before_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_whitespace_after_as(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_lpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_lpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchAs_rpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchAs_rpar(self, node: "MatchAs") -> None: + pass + + @mark_no_op + def visit_MatchCase(self, node: "MatchCase") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchCase_pattern(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_pattern(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_body(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_body(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_guard(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_guard(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_leading_lines(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_leading_lines(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_after_case(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_before_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_after_if(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchCase_whitespace_before_colon(self, node: "MatchCase") -> None: + pass + + @mark_no_op + def visit_MatchClass(self, node: "MatchClass") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchClass_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def 
leave_MatchClass_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_after_cls(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_before_patterns(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_whitespace_after_kwds(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_lpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_lpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchClass_rpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchClass_rpar(self, node: "MatchClass") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement(self, node: "MatchKeywordElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_key(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_pattern(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_comma(self, node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_whitespace_before_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_whitespace_before_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchKeywordElement_whitespace_after_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement_whitespace_after_equal( + self, node: "MatchKeywordElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchList(self, node: "MatchList") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchList_patterns(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_patterns(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_lbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_lbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_rbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_rbracket(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_lpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_lpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchList_rpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def leave_MatchList_rpar(self, node: "MatchList") -> None: + pass + + @mark_no_op + def visit_MatchMapping(self, node: "MatchMapping") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchMapping_elements(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_elements(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def 
visit_MatchMapping_lbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_lbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rbrace(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_whitespace_before_rest(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_trailing_comma(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_lpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_lpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMapping_rpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMapping_rpar(self, node: "MatchMapping") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement(self, node: "MatchMappingElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_key(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_pattern(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_comma(self, node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_whitespace_before_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_whitespace_before_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchMappingElement_whitespace_after_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def leave_MatchMappingElement_whitespace_after_colon( + self, node: "MatchMappingElement" + ) -> None: + pass + + @mark_no_op + def visit_MatchOr(self, node: "MatchOr") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchOr_patterns(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_patterns(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOr_lpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_lpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOr_rpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOr_rpar(self, node: "MatchOr") -> None: + pass + + @mark_no_op + def visit_MatchOrElement(self, node: "MatchOrElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchOrElement_pattern(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def visit_MatchOrElement_separator(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def 
leave_MatchOrElement_separator(self, node: "MatchOrElement") -> None: + pass + + @mark_no_op + def visit_MatchPattern(self, node: "MatchPattern") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequence(self, node: "MatchSequence") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequenceElement( + self, node: "MatchSequenceElement" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement_value(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def visit_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement_comma(self, node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def visit_MatchSingleton(self, node: "MatchSingleton") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchSingleton_value(self, node: "MatchSingleton") -> None: + pass + + @mark_no_op + def leave_MatchSingleton_value(self, node: "MatchSingleton") -> None: + pass + + @mark_no_op + def visit_MatchStar(self, node: "MatchStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchStar_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchStar_comma(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_comma(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchStar_whitespace_before_name(self, node: "MatchStar") -> None: + pass + + @mark_no_op + def visit_MatchTuple(self, node: "MatchTuple") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchTuple_patterns(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_patterns(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchTuple_lpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_lpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchTuple_rpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchTuple_rpar(self, node: "MatchTuple") -> None: + pass + + @mark_no_op + def visit_MatchValue(self, node: "MatchValue") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatchValue_value(self, node: "MatchValue") -> None: + pass + + @mark_no_op + def leave_MatchValue_value(self, node: "MatchValue") -> None: + pass + + @mark_no_op + def visit_MatrixMultiply(self, node: "MatrixMultiply") -> Optional[bool]: + pass + + @mark_no_op + def visit_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def leave_MatrixMultiply_whitespace_before(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def visit_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def leave_MatrixMultiply_whitespace_after(self, node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def visit_MatrixMultiplyAssign( + self, node: "MatrixMultiplyAssign" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_MatrixMultiplyAssign_whitespace_before( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def leave_MatrixMultiplyAssign_whitespace_before( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def 
visit_MatrixMultiplyAssign_whitespace_after( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def leave_MatrixMultiplyAssign_whitespace_after( + self, node: "MatrixMultiplyAssign" + ) -> None: + pass + + @mark_no_op + def visit_Minus(self, node: "Minus") -> Optional[bool]: + pass + + @mark_no_op + def visit_Minus_whitespace_after(self, node: "Minus") -> None: + pass + + @mark_no_op + def leave_Minus_whitespace_after(self, node: "Minus") -> None: + pass + + @mark_no_op + def visit_Module(self, node: "Module") -> Optional[bool]: + pass + + @mark_no_op + def visit_Module_body(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_body(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_header(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_header(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_footer(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_footer(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_encoding(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_encoding(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_default_indent(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_default_indent(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_default_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_default_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Module_has_trailing_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def leave_Module_has_trailing_newline(self, node: "Module") -> None: + pass + + @mark_no_op + def visit_Modulo(self, node: "Modulo") -> Optional[bool]: + pass + + @mark_no_op + def visit_Modulo_whitespace_before(self, node: "Modulo") -> None: + pass + + @mark_no_op + def leave_Modulo_whitespace_before(self, node: "Modulo") -> None: + pass + + @mark_no_op + def visit_Modulo_whitespace_after(self, node: "Modulo") -> None: + pass + + @mark_no_op + def leave_Modulo_whitespace_after(self, node: "Modulo") -> None: + pass + + @mark_no_op + def visit_ModuloAssign(self, node: "ModuloAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def leave_ModuloAssign_whitespace_before(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def visit_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def leave_ModuloAssign_whitespace_after(self, node: "ModuloAssign") -> None: + pass + + @mark_no_op + def visit_Multiply(self, node: "Multiply") -> Optional[bool]: + pass + + @mark_no_op + def visit_Multiply_whitespace_before(self, node: "Multiply") -> None: + pass + + @mark_no_op + def leave_Multiply_whitespace_before(self, node: "Multiply") -> None: + pass + + @mark_no_op + def visit_Multiply_whitespace_after(self, node: "Multiply") -> None: + pass + + @mark_no_op + def leave_Multiply_whitespace_after(self, node: "Multiply") -> None: + pass + + @mark_no_op + def visit_MultiplyAssign(self, node: "MultiplyAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def leave_MultiplyAssign_whitespace_before(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def visit_MultiplyAssign_whitespace_after(self, node: 
"MultiplyAssign") -> None: + pass + + @mark_no_op + def leave_MultiplyAssign_whitespace_after(self, node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def visit_Name(self, node: "Name") -> Optional[bool]: + pass + + @mark_no_op + def visit_Name_value(self, node: "Name") -> None: + pass + + @mark_no_op + def leave_Name_value(self, node: "Name") -> None: + pass + + @mark_no_op + def visit_Name_lpar(self, node: "Name") -> None: + pass + + @mark_no_op + def leave_Name_lpar(self, node: "Name") -> None: + pass + + @mark_no_op + def visit_Name_rpar(self, node: "Name") -> None: + pass + + @mark_no_op + def leave_Name_rpar(self, node: "Name") -> None: + pass + + @mark_no_op + def visit_NameItem(self, node: "NameItem") -> Optional[bool]: + pass + + @mark_no_op + def visit_NameItem_name(self, node: "NameItem") -> None: + pass + + @mark_no_op + def leave_NameItem_name(self, node: "NameItem") -> None: + pass + + @mark_no_op + def visit_NameItem_comma(self, node: "NameItem") -> None: + pass + + @mark_no_op + def leave_NameItem_comma(self, node: "NameItem") -> None: + pass + + @mark_no_op + def visit_NamedExpr(self, node: "NamedExpr") -> Optional[bool]: + pass + + @mark_no_op + def visit_NamedExpr_target(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_target(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_value(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_value(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_lpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_lpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_rpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_rpar(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_whitespace_before_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_NamedExpr_whitespace_after_walrus(self, node: "NamedExpr") -> None: + pass + + @mark_no_op + def visit_Newline(self, node: "Newline") -> Optional[bool]: + pass + + @mark_no_op + def visit_Newline_value(self, node: "Newline") -> None: + pass + + @mark_no_op + def leave_Newline_value(self, node: "Newline") -> None: + pass + + @mark_no_op + def visit_Nonlocal(self, node: "Nonlocal") -> Optional[bool]: + pass + + @mark_no_op + def visit_Nonlocal_names(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Nonlocal_names(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def visit_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Nonlocal_whitespace_after_nonlocal(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def visit_Nonlocal_semicolon(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Nonlocal_semicolon(self, node: "Nonlocal") -> None: + pass + + @mark_no_op + def visit_Not(self, node: "Not") -> Optional[bool]: + pass + + @mark_no_op + def visit_Not_whitespace_after(self, node: "Not") -> None: + pass + + @mark_no_op + def leave_Not_whitespace_after(self, node: "Not") -> None: + pass + + @mark_no_op + def visit_NotEqual(self, node: "NotEqual") -> Optional[bool]: + pass + + @mark_no_op + def visit_NotEqual_value(self, node: "NotEqual") -> None: + 
pass + + @mark_no_op + def leave_NotEqual_value(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def visit_NotEqual_whitespace_before(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def leave_NotEqual_whitespace_before(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def visit_NotEqual_whitespace_after(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def leave_NotEqual_whitespace_after(self, node: "NotEqual") -> None: + pass + + @mark_no_op + def visit_NotIn(self, node: "NotIn") -> Optional[bool]: + pass + + @mark_no_op + def visit_NotIn_whitespace_before(self, node: "NotIn") -> None: + pass + + @mark_no_op + def leave_NotIn_whitespace_before(self, node: "NotIn") -> None: + pass + + @mark_no_op + def visit_NotIn_whitespace_between(self, node: "NotIn") -> None: + pass + + @mark_no_op + def leave_NotIn_whitespace_between(self, node: "NotIn") -> None: + pass + + @mark_no_op + def visit_NotIn_whitespace_after(self, node: "NotIn") -> None: + pass + + @mark_no_op + def leave_NotIn_whitespace_after(self, node: "NotIn") -> None: + pass + + @mark_no_op + def visit_Or(self, node: "Or") -> Optional[bool]: + pass + + @mark_no_op + def visit_Or_whitespace_before(self, node: "Or") -> None: + pass + + @mark_no_op + def leave_Or_whitespace_before(self, node: "Or") -> None: + pass + + @mark_no_op + def visit_Or_whitespace_after(self, node: "Or") -> None: + pass + + @mark_no_op + def leave_Or_whitespace_after(self, node: "Or") -> None: + pass + + @mark_no_op + def visit_Param(self, node: "Param") -> Optional[bool]: + pass + + @mark_no_op + def visit_Param_name(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_name(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_annotation(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_annotation(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_equal(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_equal(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_default(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_default(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_comma(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_comma(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_star(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_star(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_whitespace_after_star(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_whitespace_after_star(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_Param_whitespace_after_param(self, node: "Param") -> None: + pass + + @mark_no_op + def leave_Param_whitespace_after_param(self, node: "Param") -> None: + pass + + @mark_no_op + def visit_ParamSlash(self, node: "ParamSlash") -> Optional[bool]: + pass + + @mark_no_op + def visit_ParamSlash_comma(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def leave_ParamSlash_comma(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def visit_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: + pass + + @mark_no_op + def visit_ParamSpec(self, node: "ParamSpec") -> Optional[bool]: + pass + + @mark_no_op + def visit_ParamSpec_name(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def 
leave_ParamSpec_name(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def visit_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def leave_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: + pass + + @mark_no_op + def visit_ParamStar(self, node: "ParamStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_ParamStar_comma(self, node: "ParamStar") -> None: + pass + + @mark_no_op + def leave_ParamStar_comma(self, node: "ParamStar") -> None: + pass + + @mark_no_op + def visit_Parameters(self, node: "Parameters") -> Optional[bool]: + pass + + @mark_no_op + def visit_Parameters_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_star_arg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_star_arg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_kwonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_kwonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_star_kwarg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_star_kwarg(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_posonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_posonly_params(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_Parameters_posonly_ind(self, node: "Parameters") -> None: + pass + + @mark_no_op + def leave_Parameters_posonly_ind(self, node: "Parameters") -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace( + self, node: "ParenthesizedWhitespace" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_first_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_first_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_empty_lines( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_empty_lines( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_indent( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_indent( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_ParenthesizedWhitespace_last_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace_last_line( + self, node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def visit_Pass(self, node: "Pass") -> Optional[bool]: + pass + + @mark_no_op + def visit_Pass_semicolon(self, node: "Pass") -> None: + pass + + @mark_no_op + def leave_Pass_semicolon(self, node: "Pass") -> None: + pass + + @mark_no_op + def visit_Plus(self, node: "Plus") -> Optional[bool]: + pass + + @mark_no_op + def visit_Plus_whitespace_after(self, node: "Plus") -> None: + pass + + @mark_no_op + def leave_Plus_whitespace_after(self, node: "Plus") -> None: + pass + + @mark_no_op + def visit_Power(self, node: "Power") -> Optional[bool]: + pass + + @mark_no_op + def visit_Power_whitespace_before(self, node: "Power") -> None: + pass + + @mark_no_op + def leave_Power_whitespace_before(self, node: 
"Power") -> None: + pass + + @mark_no_op + def visit_Power_whitespace_after(self, node: "Power") -> None: + pass + + @mark_no_op + def leave_Power_whitespace_after(self, node: "Power") -> None: + pass + + @mark_no_op + def visit_PowerAssign(self, node: "PowerAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def leave_PowerAssign_whitespace_before(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def visit_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def leave_PowerAssign_whitespace_after(self, node: "PowerAssign") -> None: + pass + + @mark_no_op + def visit_Raise(self, node: "Raise") -> Optional[bool]: + pass + + @mark_no_op + def visit_Raise_exc(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_exc(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Raise_cause(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_cause(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Raise_whitespace_after_raise(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_whitespace_after_raise(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Raise_semicolon(self, node: "Raise") -> None: + pass + + @mark_no_op + def leave_Raise_semicolon(self, node: "Raise") -> None: + pass + + @mark_no_op + def visit_Return(self, node: "Return") -> Optional[bool]: + pass + + @mark_no_op + def visit_Return_value(self, node: "Return") -> None: + pass + + @mark_no_op + def leave_Return_value(self, node: "Return") -> None: + pass + + @mark_no_op + def visit_Return_whitespace_after_return(self, node: "Return") -> None: + pass + + @mark_no_op + def leave_Return_whitespace_after_return(self, node: "Return") -> None: + pass + + @mark_no_op + def visit_Return_semicolon(self, node: "Return") -> None: + pass + + @mark_no_op + def leave_Return_semicolon(self, node: "Return") -> None: + pass + + @mark_no_op + def visit_RightCurlyBrace(self, node: "RightCurlyBrace") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: + pass + + @mark_no_op + def leave_RightCurlyBrace_whitespace_before(self, node: "RightCurlyBrace") -> None: + pass + + @mark_no_op + def visit_RightParen(self, node: "RightParen") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightParen_whitespace_before(self, node: "RightParen") -> None: + pass + + @mark_no_op + def leave_RightParen_whitespace_before(self, node: "RightParen") -> None: + pass + + @mark_no_op + def visit_RightShift(self, node: "RightShift") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightShift_whitespace_before(self, node: "RightShift") -> None: + pass + + @mark_no_op + def leave_RightShift_whitespace_before(self, node: "RightShift") -> None: + pass + + @mark_no_op + def visit_RightShift_whitespace_after(self, node: "RightShift") -> None: + pass + + @mark_no_op + def leave_RightShift_whitespace_after(self, node: "RightShift") -> None: + pass + + @mark_no_op + def visit_RightShiftAssign(self, node: "RightShiftAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightShiftAssign_whitespace_before( + self, node: "RightShiftAssign" + ) -> None: + pass + + @mark_no_op + def leave_RightShiftAssign_whitespace_before( + self, node: "RightShiftAssign" + ) -> None: + pass + + @mark_no_op + def visit_RightShiftAssign_whitespace_after(self, node: 
"RightShiftAssign") -> None: + pass + + @mark_no_op + def leave_RightShiftAssign_whitespace_after(self, node: "RightShiftAssign") -> None: + pass + + @mark_no_op + def visit_RightSquareBracket(self, node: "RightSquareBracket") -> Optional[bool]: + pass + + @mark_no_op + def visit_RightSquareBracket_whitespace_before( + self, node: "RightSquareBracket" + ) -> None: + pass + + @mark_no_op + def leave_RightSquareBracket_whitespace_before( + self, node: "RightSquareBracket" + ) -> None: + pass + + @mark_no_op + def visit_Semicolon(self, node: "Semicolon") -> Optional[bool]: + pass + + @mark_no_op + def visit_Semicolon_whitespace_before(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def leave_Semicolon_whitespace_before(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def visit_Semicolon_whitespace_after(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def leave_Semicolon_whitespace_after(self, node: "Semicolon") -> None: + pass + + @mark_no_op + def visit_Set(self, node: "Set") -> Optional[bool]: + pass + + @mark_no_op + def visit_Set_elements(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_elements(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_lbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_lbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_rbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_rbrace(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_lpar(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_lpar(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_Set_rpar(self, node: "Set") -> None: + pass + + @mark_no_op + def leave_Set_rpar(self, node: "Set") -> None: + pass + + @mark_no_op + def visit_SetComp(self, node: "SetComp") -> Optional[bool]: + pass + + @mark_no_op + def visit_SetComp_elt(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_elt(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_for_in(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_for_in(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_lbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_lbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_rbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_rbrace(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_lpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_lpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SetComp_rpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SetComp_rpar(self, node: "SetComp") -> None: + pass + + @mark_no_op + def visit_SimpleStatementLine(self, node: "SimpleStatementLine") -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: + pass + + @mark_no_op + def leave_SimpleStatementLine_body(self, node: "SimpleStatementLine") -> None: + pass + + @mark_no_op + def visit_SimpleStatementLine_leading_lines( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementLine_leading_lines( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def visit_SimpleStatementLine_trailing_whitespace( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def 
leave_SimpleStatementLine_trailing_whitespace( + self, node: "SimpleStatementLine" + ) -> None: + pass + + @mark_no_op + def visit_SimpleStatementSuite( + self, node: "SimpleStatementSuite" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite_body(self, node: "SimpleStatementSuite") -> None: + pass + + @mark_no_op + def visit_SimpleStatementSuite_leading_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite_leading_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def visit_SimpleStatementSuite_trailing_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite_trailing_whitespace( + self, node: "SimpleStatementSuite" + ) -> None: + pass + + @mark_no_op + def visit_SimpleString(self, node: "SimpleString") -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleString_value(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleString_value(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def visit_SimpleString_lpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleString_lpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def visit_SimpleString_rpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleString_rpar(self, node: "SimpleString") -> None: + pass + + @mark_no_op + def visit_SimpleWhitespace(self, node: "SimpleWhitespace") -> Optional[bool]: + pass + + @mark_no_op + def visit_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: + pass + + @mark_no_op + def leave_SimpleWhitespace_value(self, node: "SimpleWhitespace") -> None: + pass + + @mark_no_op + def visit_Slice(self, node: "Slice") -> Optional[bool]: + pass + + @mark_no_op + def visit_Slice_lower(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_lower(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_upper(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_upper(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_step(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_step(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_first_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_first_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_Slice_second_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def leave_Slice_second_colon(self, node: "Slice") -> None: + pass + + @mark_no_op + def visit_StarredDictElement(self, node: "StarredDictElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_StarredDictElement_value(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def leave_StarredDictElement_value(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def visit_StarredDictElement_comma(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def leave_StarredDictElement_comma(self, node: "StarredDictElement") -> None: + pass + + @mark_no_op + def visit_StarredDictElement_whitespace_before_value( + self, node: "StarredDictElement" + ) -> None: + pass + + @mark_no_op + def leave_StarredDictElement_whitespace_before_value( + self, node: "StarredDictElement" + ) -> None: + pass + + @mark_no_op + def 
visit_StarredElement(self, node: "StarredElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_StarredElement_value(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_value(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_comma(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_comma(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_lpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_lpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_rpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_StarredElement_rpar(self, node: "StarredElement") -> None: + pass + + @mark_no_op + def visit_StarredElement_whitespace_before_value( + self, node: "StarredElement" + ) -> None: + pass + + @mark_no_op + def leave_StarredElement_whitespace_before_value( + self, node: "StarredElement" + ) -> None: + pass + + @mark_no_op + def visit_Subscript(self, node: "Subscript") -> Optional[bool]: + pass + + @mark_no_op + def visit_Subscript_value(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_value(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_slice(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_slice(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_lbracket(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_lbracket(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_rbracket(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_rbracket(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_lpar(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_lpar(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_rpar(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_rpar(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_Subscript_whitespace_after_value(self, node: "Subscript") -> None: + pass + + @mark_no_op + def leave_Subscript_whitespace_after_value(self, node: "Subscript") -> None: + pass + + @mark_no_op + def visit_SubscriptElement(self, node: "SubscriptElement") -> Optional[bool]: + pass + + @mark_no_op + def visit_SubscriptElement_slice(self, node: "SubscriptElement") -> None: + pass + + @mark_no_op + def leave_SubscriptElement_slice(self, node: "SubscriptElement") -> None: + pass + + @mark_no_op + def visit_SubscriptElement_comma(self, node: "SubscriptElement") -> None: + pass + + @mark_no_op + def leave_SubscriptElement_comma(self, node: "SubscriptElement") -> None: + pass + + @mark_no_op + def visit_Subtract(self, node: "Subtract") -> Optional[bool]: + pass + + @mark_no_op + def visit_Subtract_whitespace_before(self, node: "Subtract") -> None: + pass + + @mark_no_op + def leave_Subtract_whitespace_before(self, node: "Subtract") -> None: + pass + + @mark_no_op + def visit_Subtract_whitespace_after(self, node: "Subtract") -> None: + pass + + @mark_no_op + def leave_Subtract_whitespace_after(self, node: "Subtract") -> None: + pass + + @mark_no_op + def visit_SubtractAssign(self, node: "SubtractAssign") -> Optional[bool]: + pass + + @mark_no_op + def visit_SubtractAssign_whitespace_before(self, node: "SubtractAssign") -> None: 
+ pass + + @mark_no_op + def leave_SubtractAssign_whitespace_before(self, node: "SubtractAssign") -> None: + pass + + @mark_no_op + def visit_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: + pass + + @mark_no_op + def leave_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: + pass + + @mark_no_op + def visit_TrailingWhitespace(self, node: "TrailingWhitespace") -> Optional[bool]: + pass + + @mark_no_op + def visit_TrailingWhitespace_whitespace(self, node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def leave_TrailingWhitespace_whitespace(self, node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def visit_TrailingWhitespace_comment(self, node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def leave_TrailingWhitespace_comment(self, node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def visit_TrailingWhitespace_newline(self, node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def leave_TrailingWhitespace_newline(self, node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def visit_Try(self, node: "Try") -> Optional[bool]: + pass + + @mark_no_op + def visit_Try_body(self, node: "Try") -> None: + pass + + @mark_no_op + def leave_Try_body(self, node: "Try") -> None: + pass + + @mark_no_op + def visit_Try_handlers(self, node: "Try") -> None: + pass + + @mark_no_op + def leave_Try_handlers(self, node: "Try") -> None: + pass + + @mark_no_op + def visit_Try_orelse(self, node: "Try") -> None: + pass + + @mark_no_op + def leave_Try_orelse(self, node: "Try") -> None: + pass + + @mark_no_op + def visit_Try_finalbody(self, node: "Try") -> None: + pass + + @mark_no_op + def leave_Try_finalbody(self, node: "Try") -> None: + pass + + @mark_no_op + def visit_Try_leading_lines(self, node: "Try") -> None: + pass + + @mark_no_op + def leave_Try_leading_lines(self, node: "Try") -> None: + pass + + @mark_no_op + def visit_Try_whitespace_before_colon(self, node: "Try") -> None: + pass + + @mark_no_op + def leave_Try_whitespace_before_colon(self, node: "Try") -> None: + pass + + @mark_no_op + def visit_TryStar(self, node: "TryStar") -> Optional[bool]: + pass + + @mark_no_op + def visit_TryStar_body(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_body(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_handlers(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_handlers(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_orelse(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_orelse(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_finalbody(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_finalbody(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_leading_lines(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_leading_lines(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: + pass + + @mark_no_op + def leave_TryStar_whitespace_before_colon(self, node: "TryStar") -> None: + pass + + @mark_no_op + def visit_Tuple(self, node: "Tuple") -> Optional[bool]: + pass + + @mark_no_op + def visit_Tuple_elements(self, node: "Tuple") -> None: + pass + + @mark_no_op + def leave_Tuple_elements(self, node: "Tuple") -> None: + pass + + @mark_no_op + def visit_Tuple_lpar(self, node: "Tuple") -> None: + pass + + @mark_no_op + def 
leave_Tuple_lpar(self, node: "Tuple") -> None: + pass + + @mark_no_op + def visit_Tuple_rpar(self, node: "Tuple") -> None: + pass + + @mark_no_op + def leave_Tuple_rpar(self, node: "Tuple") -> None: + pass + + @mark_no_op + def visit_TypeAlias(self, node: "TypeAlias") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeAlias_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_value(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_value(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_type_parameters( + self, node: "TypeAlias" + ) -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_type_parameters( + self, node: "TypeAlias" + ) -> None: + pass + + @mark_no_op + def visit_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeAlias_semicolon(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeAlias_semicolon(self, node: "TypeAlias") -> None: + pass + + @mark_no_op + def visit_TypeParam(self, node: "TypeParam") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeParam_param(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_param(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParam_comma(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_comma(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParam_equal(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_equal(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParam_star(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_star(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParam_whitespace_after_star(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_whitespace_after_star(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParam_default(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParam_default(self, node: "TypeParam") -> None: + pass + + @mark_no_op + def visit_TypeParameters(self, node: "TypeParameters") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeParameters_params(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeParameters_params(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def visit_TypeParameters_lbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeParameters_lbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def visit_TypeParameters_rbracket(self, node: 
"TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeParameters_rbracket(self, node: "TypeParameters") -> None: + pass + + @mark_no_op + def visit_TypeVar(self, node: "TypeVar") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeVar_name(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVar_name(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def visit_TypeVar_bound(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVar_bound(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def visit_TypeVar_colon(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVar_colon(self, node: "TypeVar") -> None: + pass + + @mark_no_op + def visit_TypeVarTuple(self, node: "TypeVarTuple") -> Optional[bool]: + pass + + @mark_no_op + def visit_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def leave_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def visit_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def leave_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def visit_UnaryOperation(self, node: "UnaryOperation") -> Optional[bool]: + pass + + @mark_no_op + def visit_UnaryOperation_operator(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def leave_UnaryOperation_operator(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def visit_UnaryOperation_expression(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def leave_UnaryOperation_expression(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def visit_UnaryOperation_lpar(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def leave_UnaryOperation_lpar(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def visit_UnaryOperation_rpar(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def leave_UnaryOperation_rpar(self, node: "UnaryOperation") -> None: + pass + + @mark_no_op + def visit_While(self, node: "While") -> Optional[bool]: + pass + + @mark_no_op + def visit_While_test(self, node: "While") -> None: + pass + + @mark_no_op + def leave_While_test(self, node: "While") -> None: + pass + + @mark_no_op + def visit_While_body(self, node: "While") -> None: + pass + + @mark_no_op + def leave_While_body(self, node: "While") -> None: + pass + + @mark_no_op + def visit_While_orelse(self, node: "While") -> None: + pass + + @mark_no_op + def leave_While_orelse(self, node: "While") -> None: + pass + + @mark_no_op + def visit_While_leading_lines(self, node: "While") -> None: + pass + + @mark_no_op + def leave_While_leading_lines(self, node: "While") -> None: + pass + + @mark_no_op + def visit_While_whitespace_after_while(self, node: "While") -> None: + pass + + @mark_no_op + def leave_While_whitespace_after_while(self, node: "While") -> None: + pass + + @mark_no_op + def visit_While_whitespace_before_colon(self, node: "While") -> None: + pass + + @mark_no_op + def leave_While_whitespace_before_colon(self, node: "While") -> None: + pass + + @mark_no_op + def visit_With(self, node: "With") -> Optional[bool]: + pass + + @mark_no_op + def visit_With_items(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_items(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_body(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_body(self, node: "With") -> None: + pass + + @mark_no_op + def 
visit_With_asynchronous(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_asynchronous(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_leading_lines(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_leading_lines(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_lpar(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_lpar(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_rpar(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_rpar(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_whitespace_after_with(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_whitespace_after_with(self, node: "With") -> None: + pass + + @mark_no_op + def visit_With_whitespace_before_colon(self, node: "With") -> None: + pass + + @mark_no_op + def leave_With_whitespace_before_colon(self, node: "With") -> None: + pass + + @mark_no_op + def visit_WithItem(self, node: "WithItem") -> Optional[bool]: + pass + + @mark_no_op + def visit_WithItem_item(self, node: "WithItem") -> None: + pass + + @mark_no_op + def leave_WithItem_item(self, node: "WithItem") -> None: + pass + + @mark_no_op + def visit_WithItem_asname(self, node: "WithItem") -> None: + pass + + @mark_no_op + def leave_WithItem_asname(self, node: "WithItem") -> None: + pass + + @mark_no_op + def visit_WithItem_comma(self, node: "WithItem") -> None: + pass + + @mark_no_op + def leave_WithItem_comma(self, node: "WithItem") -> None: + pass + + @mark_no_op + def visit_Yield(self, node: "Yield") -> Optional[bool]: + pass + + @mark_no_op + def visit_Yield_value(self, node: "Yield") -> None: + pass + + @mark_no_op + def leave_Yield_value(self, node: "Yield") -> None: + pass + + @mark_no_op + def visit_Yield_lpar(self, node: "Yield") -> None: + pass + + @mark_no_op + def leave_Yield_lpar(self, node: "Yield") -> None: + pass + + @mark_no_op + def visit_Yield_rpar(self, node: "Yield") -> None: + pass + + @mark_no_op + def leave_Yield_rpar(self, node: "Yield") -> None: + pass + + @mark_no_op + def visit_Yield_whitespace_after_yield(self, node: "Yield") -> None: + pass + + @mark_no_op + def leave_Yield_whitespace_after_yield(self, node: "Yield") -> None: + pass + + +class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): + @mark_no_op + def leave_Add(self, original_node: "Add") -> None: + pass + + @mark_no_op + def leave_AddAssign(self, original_node: "AddAssign") -> None: + pass + + @mark_no_op + def leave_And(self, original_node: "And") -> None: + pass + + @mark_no_op + def leave_AnnAssign(self, original_node: "AnnAssign") -> None: + pass + + @mark_no_op + def leave_Annotation(self, original_node: "Annotation") -> None: + pass + + @mark_no_op + def leave_Arg(self, original_node: "Arg") -> None: + pass + + @mark_no_op + def leave_AsName(self, original_node: "AsName") -> None: + pass + + @mark_no_op + def leave_Assert(self, original_node: "Assert") -> None: + pass + + @mark_no_op + def leave_Assign(self, original_node: "Assign") -> None: + pass + + @mark_no_op + def leave_AssignEqual(self, original_node: "AssignEqual") -> None: + pass + + @mark_no_op + def leave_AssignTarget(self, original_node: "AssignTarget") -> None: + pass + + @mark_no_op + def leave_Asynchronous(self, original_node: "Asynchronous") -> None: + pass + + @mark_no_op + def leave_Attribute(self, original_node: "Attribute") -> None: + pass + + @mark_no_op + def leave_AugAssign(self, original_node: "AugAssign") -> None: + pass + + @mark_no_op 
+ def leave_Await(self, original_node: "Await") -> None: + pass + + @mark_no_op + def leave_BinaryOperation(self, original_node: "BinaryOperation") -> None: + pass + + @mark_no_op + def leave_BitAnd(self, original_node: "BitAnd") -> None: + pass + + @mark_no_op + def leave_BitAndAssign(self, original_node: "BitAndAssign") -> None: + pass + + @mark_no_op + def leave_BitInvert(self, original_node: "BitInvert") -> None: + pass + + @mark_no_op + def leave_BitOr(self, original_node: "BitOr") -> None: + pass + + @mark_no_op + def leave_BitOrAssign(self, original_node: "BitOrAssign") -> None: + pass + + @mark_no_op + def leave_BitXor(self, original_node: "BitXor") -> None: + pass + + @mark_no_op + def leave_BitXorAssign(self, original_node: "BitXorAssign") -> None: + pass + + @mark_no_op + def leave_BooleanOperation(self, original_node: "BooleanOperation") -> None: + pass + + @mark_no_op + def leave_Break(self, original_node: "Break") -> None: + pass + + @mark_no_op + def leave_Call(self, original_node: "Call") -> None: + pass + + @mark_no_op + def leave_ClassDef(self, original_node: "ClassDef") -> None: + pass + + @mark_no_op + def leave_Colon(self, original_node: "Colon") -> None: + pass + + @mark_no_op + def leave_Comma(self, original_node: "Comma") -> None: + pass + + @mark_no_op + def leave_Comment(self, original_node: "Comment") -> None: + pass + + @mark_no_op + def leave_CompFor(self, original_node: "CompFor") -> None: + pass + + @mark_no_op + def leave_CompIf(self, original_node: "CompIf") -> None: + pass + + @mark_no_op + def leave_Comparison(self, original_node: "Comparison") -> None: + pass + + @mark_no_op + def leave_ComparisonTarget(self, original_node: "ComparisonTarget") -> None: + pass + + @mark_no_op + def leave_ConcatenatedString(self, original_node: "ConcatenatedString") -> None: + pass + + @mark_no_op + def leave_Continue(self, original_node: "Continue") -> None: + pass + + @mark_no_op + def leave_Decorator(self, original_node: "Decorator") -> None: + pass + + @mark_no_op + def leave_Del(self, original_node: "Del") -> None: + pass + + @mark_no_op + def leave_Dict(self, original_node: "Dict") -> None: + pass + + @mark_no_op + def leave_DictComp(self, original_node: "DictComp") -> None: + pass + + @mark_no_op + def leave_DictElement(self, original_node: "DictElement") -> None: + pass + + @mark_no_op + def leave_Divide(self, original_node: "Divide") -> None: + pass + + @mark_no_op + def leave_DivideAssign(self, original_node: "DivideAssign") -> None: + pass + + @mark_no_op + def leave_Dot(self, original_node: "Dot") -> None: + pass + + @mark_no_op + def leave_Element(self, original_node: "Element") -> None: + pass + + @mark_no_op + def leave_Ellipsis(self, original_node: "Ellipsis") -> None: + pass + + @mark_no_op + def leave_Else(self, original_node: "Else") -> None: + pass + + @mark_no_op + def leave_EmptyLine(self, original_node: "EmptyLine") -> None: + pass + + @mark_no_op + def leave_Equal(self, original_node: "Equal") -> None: + pass + + @mark_no_op + def leave_ExceptHandler(self, original_node: "ExceptHandler") -> None: + pass + + @mark_no_op + def leave_ExceptStarHandler(self, original_node: "ExceptStarHandler") -> None: + pass + + @mark_no_op + def leave_Expr(self, original_node: "Expr") -> None: + pass + + @mark_no_op + def leave_Finally(self, original_node: "Finally") -> None: + pass + + @mark_no_op + def leave_Float(self, original_node: "Float") -> None: + pass + + @mark_no_op + def leave_FloorDivide(self, original_node: "FloorDivide") -> None: + pass + + @mark_no_op 
+ def leave_FloorDivideAssign(self, original_node: "FloorDivideAssign") -> None: + pass + + @mark_no_op + def leave_For(self, original_node: "For") -> None: + pass + + @mark_no_op + def leave_FormattedString(self, original_node: "FormattedString") -> None: + pass + + @mark_no_op + def leave_FormattedStringExpression( + self, original_node: "FormattedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_FormattedStringText(self, original_node: "FormattedStringText") -> None: + pass + + @mark_no_op + def leave_From(self, original_node: "From") -> None: + pass + + @mark_no_op + def leave_FunctionDef(self, original_node: "FunctionDef") -> None: + pass + + @mark_no_op + def leave_GeneratorExp(self, original_node: "GeneratorExp") -> None: + pass + + @mark_no_op + def leave_Global(self, original_node: "Global") -> None: + pass + + @mark_no_op + def leave_GreaterThan(self, original_node: "GreaterThan") -> None: + pass + + @mark_no_op + def leave_GreaterThanEqual(self, original_node: "GreaterThanEqual") -> None: + pass + + @mark_no_op + def leave_If(self, original_node: "If") -> None: + pass + + @mark_no_op + def leave_IfExp(self, original_node: "IfExp") -> None: + pass + + @mark_no_op + def leave_Imaginary(self, original_node: "Imaginary") -> None: + pass + + @mark_no_op + def leave_Import(self, original_node: "Import") -> None: + pass + + @mark_no_op + def leave_ImportAlias(self, original_node: "ImportAlias") -> None: + pass + + @mark_no_op + def leave_ImportFrom(self, original_node: "ImportFrom") -> None: + pass + + @mark_no_op + def leave_ImportStar(self, original_node: "ImportStar") -> None: + pass + + @mark_no_op + def leave_In(self, original_node: "In") -> None: + pass + + @mark_no_op + def leave_IndentedBlock(self, original_node: "IndentedBlock") -> None: + pass + + @mark_no_op + def leave_Index(self, original_node: "Index") -> None: + pass + + @mark_no_op + def leave_Integer(self, original_node: "Integer") -> None: + pass + + @mark_no_op + def leave_Is(self, original_node: "Is") -> None: + pass + + @mark_no_op + def leave_IsNot(self, original_node: "IsNot") -> None: + pass + + @mark_no_op + def leave_Lambda(self, original_node: "Lambda") -> None: + pass + + @mark_no_op + def leave_LeftCurlyBrace(self, original_node: "LeftCurlyBrace") -> None: + pass + + @mark_no_op + def leave_LeftParen(self, original_node: "LeftParen") -> None: + pass + + @mark_no_op + def leave_LeftShift(self, original_node: "LeftShift") -> None: + pass + + @mark_no_op + def leave_LeftShiftAssign(self, original_node: "LeftShiftAssign") -> None: + pass + + @mark_no_op + def leave_LeftSquareBracket(self, original_node: "LeftSquareBracket") -> None: + pass + + @mark_no_op + def leave_LessThan(self, original_node: "LessThan") -> None: + pass + + @mark_no_op + def leave_LessThanEqual(self, original_node: "LessThanEqual") -> None: + pass + + @mark_no_op + def leave_List(self, original_node: "List") -> None: + pass + + @mark_no_op + def leave_ListComp(self, original_node: "ListComp") -> None: + pass + + @mark_no_op + def leave_Match(self, original_node: "Match") -> None: + pass + + @mark_no_op + def leave_MatchAs(self, original_node: "MatchAs") -> None: + pass + + @mark_no_op + def leave_MatchCase(self, original_node: "MatchCase") -> None: + pass + + @mark_no_op + def leave_MatchClass(self, original_node: "MatchClass") -> None: + pass + + @mark_no_op + def leave_MatchKeywordElement(self, original_node: "MatchKeywordElement") -> None: + pass + + @mark_no_op + def leave_MatchList(self, original_node: "MatchList") -> 
None: + pass + + @mark_no_op + def leave_MatchMapping(self, original_node: "MatchMapping") -> None: + pass + + @mark_no_op + def leave_MatchMappingElement(self, original_node: "MatchMappingElement") -> None: + pass + + @mark_no_op + def leave_MatchOr(self, original_node: "MatchOr") -> None: + pass + + @mark_no_op + def leave_MatchOrElement(self, original_node: "MatchOrElement") -> None: + pass + + @mark_no_op + def leave_MatchPattern(self, original_node: "MatchPattern") -> None: + pass + + @mark_no_op + def leave_MatchSequence(self, original_node: "MatchSequence") -> None: + pass + + @mark_no_op + def leave_MatchSequenceElement(self, original_node: "MatchSequenceElement") -> None: + pass + + @mark_no_op + def leave_MatchSingleton(self, original_node: "MatchSingleton") -> None: + pass + + @mark_no_op + def leave_MatchStar(self, original_node: "MatchStar") -> None: + pass + + @mark_no_op + def leave_MatchTuple(self, original_node: "MatchTuple") -> None: + pass + + @mark_no_op + def leave_MatchValue(self, original_node: "MatchValue") -> None: + pass + + @mark_no_op + def leave_MatrixMultiply(self, original_node: "MatrixMultiply") -> None: + pass + + @mark_no_op + def leave_MatrixMultiplyAssign(self, original_node: "MatrixMultiplyAssign") -> None: + pass + + @mark_no_op + def leave_Minus(self, original_node: "Minus") -> None: + pass + + @mark_no_op + def leave_Module(self, original_node: "Module") -> None: + pass + + @mark_no_op + def leave_Modulo(self, original_node: "Modulo") -> None: + pass + + @mark_no_op + def leave_ModuloAssign(self, original_node: "ModuloAssign") -> None: + pass + + @mark_no_op + def leave_Multiply(self, original_node: "Multiply") -> None: + pass + + @mark_no_op + def leave_MultiplyAssign(self, original_node: "MultiplyAssign") -> None: + pass + + @mark_no_op + def leave_Name(self, original_node: "Name") -> None: + pass + + @mark_no_op + def leave_NameItem(self, original_node: "NameItem") -> None: + pass + + @mark_no_op + def leave_NamedExpr(self, original_node: "NamedExpr") -> None: + pass + + @mark_no_op + def leave_Newline(self, original_node: "Newline") -> None: + pass + + @mark_no_op + def leave_Nonlocal(self, original_node: "Nonlocal") -> None: + pass + + @mark_no_op + def leave_Not(self, original_node: "Not") -> None: + pass + + @mark_no_op + def leave_NotEqual(self, original_node: "NotEqual") -> None: + pass + + @mark_no_op + def leave_NotIn(self, original_node: "NotIn") -> None: + pass + + @mark_no_op + def leave_Or(self, original_node: "Or") -> None: + pass + + @mark_no_op + def leave_Param(self, original_node: "Param") -> None: + pass + + @mark_no_op + def leave_ParamSlash(self, original_node: "ParamSlash") -> None: + pass + + @mark_no_op + def leave_ParamSpec(self, original_node: "ParamSpec") -> None: + pass + + @mark_no_op + def leave_ParamStar(self, original_node: "ParamStar") -> None: + pass + + @mark_no_op + def leave_Parameters(self, original_node: "Parameters") -> None: + pass + + @mark_no_op + def leave_ParenthesizedWhitespace( + self, original_node: "ParenthesizedWhitespace" + ) -> None: + pass + + @mark_no_op + def leave_Pass(self, original_node: "Pass") -> None: + pass + + @mark_no_op + def leave_Plus(self, original_node: "Plus") -> None: + pass + + @mark_no_op + def leave_Power(self, original_node: "Power") -> None: + pass + + @mark_no_op + def leave_PowerAssign(self, original_node: "PowerAssign") -> None: + pass + + @mark_no_op + def leave_Raise(self, original_node: "Raise") -> None: + pass + + @mark_no_op + def leave_Return(self, original_node: 
"Return") -> None: + pass + + @mark_no_op + def leave_RightCurlyBrace(self, original_node: "RightCurlyBrace") -> None: + pass + + @mark_no_op + def leave_RightParen(self, original_node: "RightParen") -> None: + pass + + @mark_no_op + def leave_RightShift(self, original_node: "RightShift") -> None: + pass + + @mark_no_op + def leave_RightShiftAssign(self, original_node: "RightShiftAssign") -> None: + pass + + @mark_no_op + def leave_RightSquareBracket(self, original_node: "RightSquareBracket") -> None: + pass + + @mark_no_op + def leave_Semicolon(self, original_node: "Semicolon") -> None: + pass + + @mark_no_op + def leave_Set(self, original_node: "Set") -> None: + pass + + @mark_no_op + def leave_SetComp(self, original_node: "SetComp") -> None: + pass + + @mark_no_op + def leave_SimpleStatementLine(self, original_node: "SimpleStatementLine") -> None: + pass + + @mark_no_op + def leave_SimpleStatementSuite(self, original_node: "SimpleStatementSuite") -> None: + pass + + @mark_no_op + def leave_SimpleString(self, original_node: "SimpleString") -> None: + pass + + @mark_no_op + def leave_SimpleWhitespace(self, original_node: "SimpleWhitespace") -> None: + pass + + @mark_no_op + def leave_Slice(self, original_node: "Slice") -> None: + pass + + @mark_no_op + def leave_StarredDictElement(self, original_node: "StarredDictElement") -> None: + pass + + @mark_no_op + def leave_StarredElement(self, original_node: "StarredElement") -> None: + pass + + @mark_no_op + def leave_Subscript(self, original_node: "Subscript") -> None: + pass + + @mark_no_op + def leave_SubscriptElement(self, original_node: "SubscriptElement") -> None: + pass + + @mark_no_op + def leave_Subtract(self, original_node: "Subtract") -> None: + pass + + @mark_no_op + def leave_SubtractAssign(self, original_node: "SubtractAssign") -> None: + pass + + @mark_no_op + def leave_TrailingWhitespace(self, original_node: "TrailingWhitespace") -> None: + pass + + @mark_no_op + def leave_Try(self, original_node: "Try") -> None: + pass + + @mark_no_op + def leave_TryStar(self, original_node: "TryStar") -> None: + pass + + @mark_no_op + def leave_Tuple(self, original_node: "Tuple") -> None: + pass + + @mark_no_op + def leave_TypeAlias(self, original_node: "TypeAlias") -> None: + pass + + @mark_no_op + def leave_TypeParam(self, original_node: "TypeParam") -> None: + pass + + @mark_no_op + def leave_TypeParameters(self, original_node: "TypeParameters") -> None: + pass + + @mark_no_op + def leave_TypeVar(self, original_node: "TypeVar") -> None: + pass + + @mark_no_op + def leave_TypeVarTuple(self, original_node: "TypeVarTuple") -> None: + pass + + @mark_no_op + def leave_UnaryOperation(self, original_node: "UnaryOperation") -> None: + pass + + @mark_no_op + def leave_While(self, original_node: "While") -> None: + pass + + @mark_no_op + def leave_With(self, original_node: "With") -> None: + pass + + @mark_no_op + def leave_WithItem(self, original_node: "WithItem") -> None: + pass + + @mark_no_op + def leave_Yield(self, original_node: "Yield") -> None: + pass + + +class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): + @mark_no_op + def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_AddAssign( + self, original_node: "AddAssign", updated_node: "AddAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_And(self, original_node: "And", updated_node: "And") -> "BaseBooleanOp": + return updated_node + + @mark_no_op + def leave_AnnAssign( + self, 
original_node: "AnnAssign", updated_node: "AnnAssign" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Annotation( + self, original_node: "Annotation", updated_node: "Annotation" + ) -> "Annotation": + return updated_node + + @mark_no_op + def leave_Arg( + self, original_node: "Arg", updated_node: "Arg" + ) -> Union["Arg", FlattenSentinel["Arg"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_AsName(self, original_node: "AsName", updated_node: "AsName") -> "AsName": + return updated_node + + @mark_no_op + def leave_Assert( + self, original_node: "Assert", updated_node: "Assert" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Assign( + self, original_node: "Assign", updated_node: "Assign" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_AssignEqual( + self, original_node: "AssignEqual", updated_node: "AssignEqual" + ) -> Union["AssignEqual", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_AssignTarget( + self, original_node: "AssignTarget", updated_node: "AssignTarget" + ) -> Union["AssignTarget", FlattenSentinel["AssignTarget"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Asynchronous( + self, original_node: "Asynchronous", updated_node: "Asynchronous" + ) -> "Asynchronous": + return updated_node + + @mark_no_op + def leave_Attribute( + self, original_node: "Attribute", updated_node: "Attribute" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_AugAssign( + self, original_node: "AugAssign", updated_node: "AugAssign" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Await( + self, original_node: "Await", updated_node: "Await" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_BinaryOperation( + self, original_node: "BinaryOperation", updated_node: "BinaryOperation" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_BitAnd( + self, original_node: "BitAnd", updated_node: "BitAnd" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_BitAndAssign( + self, original_node: "BitAndAssign", updated_node: "BitAndAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_BitInvert( + self, original_node: "BitInvert", updated_node: "BitInvert" + ) -> "BaseUnaryOp": + return updated_node + + @mark_no_op + def leave_BitOr( + self, original_node: "BitOr", updated_node: "BitOr" + ) -> Union["BaseBinaryOp", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_BitOrAssign( + self, original_node: "BitOrAssign", updated_node: "BitOrAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_BitXor( + self, original_node: "BitXor", updated_node: "BitXor" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_BitXorAssign( + self, original_node: "BitXorAssign", updated_node: "BitXorAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_BooleanOperation( + self, original_node: "BooleanOperation", updated_node: "BooleanOperation" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Break( + self, original_node: "Break", updated_node: "Break" + ) -> Union[ + 
"BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Call( + self, original_node: "Call", updated_node: "Call" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_ClassDef( + self, original_node: "ClassDef", updated_node: "ClassDef" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Colon( + self, original_node: "Colon", updated_node: "Colon" + ) -> Union["Colon", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_Comma( + self, original_node: "Comma", updated_node: "Comma" + ) -> Union["Comma", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_Comment( + self, original_node: "Comment", updated_node: "Comment" + ) -> "Comment": + return updated_node + + @mark_no_op + def leave_CompFor( + self, original_node: "CompFor", updated_node: "CompFor" + ) -> "CompFor": + return updated_node + + @mark_no_op + def leave_CompIf(self, original_node: "CompIf", updated_node: "CompIf") -> "CompIf": + return updated_node + + @mark_no_op + def leave_Comparison( + self, original_node: "Comparison", updated_node: "Comparison" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_ComparisonTarget( + self, original_node: "ComparisonTarget", updated_node: "ComparisonTarget" + ) -> Union[ + "ComparisonTarget", FlattenSentinel["ComparisonTarget"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_ConcatenatedString( + self, original_node: "ConcatenatedString", updated_node: "ConcatenatedString" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Continue( + self, original_node: "Continue", updated_node: "Continue" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Decorator( + self, original_node: "Decorator", updated_node: "Decorator" + ) -> Union["Decorator", FlattenSentinel["Decorator"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Del( + self, original_node: "Del", updated_node: "Del" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Dict( + self, original_node: "Dict", updated_node: "Dict" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_DictComp( + self, original_node: "DictComp", updated_node: "DictComp" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_DictElement( + self, original_node: "DictElement", updated_node: "DictElement" + ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Divide( + self, original_node: "Divide", updated_node: "Divide" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_DivideAssign( + self, original_node: "DivideAssign", updated_node: "DivideAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_Dot( + self, original_node: "Dot", updated_node: "Dot" + ) -> Union["Dot", FlattenSentinel["Dot"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Element( + self, original_node: "Element", updated_node: "Element" + ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Ellipsis( + self, original_node: "Ellipsis", updated_node: 
"Ellipsis" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Else(self, original_node: "Else", updated_node: "Else") -> "Else": + return updated_node + + @mark_no_op + def leave_EmptyLine( + self, original_node: "EmptyLine", updated_node: "EmptyLine" + ) -> Union["EmptyLine", FlattenSentinel["EmptyLine"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Equal( + self, original_node: "Equal", updated_node: "Equal" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_ExceptHandler( + self, original_node: "ExceptHandler", updated_node: "ExceptHandler" + ) -> Union["ExceptHandler", FlattenSentinel["ExceptHandler"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_ExceptStarHandler( + self, original_node: "ExceptStarHandler", updated_node: "ExceptStarHandler" + ) -> Union[ + "ExceptStarHandler", FlattenSentinel["ExceptStarHandler"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Expr( + self, original_node: "Expr", updated_node: "Expr" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Finally( + self, original_node: "Finally", updated_node: "Finally" + ) -> "Finally": + return updated_node + + @mark_no_op + def leave_Float( + self, original_node: "Float", updated_node: "Float" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_FloorDivide( + self, original_node: "FloorDivide", updated_node: "FloorDivide" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_FloorDivideAssign( + self, original_node: "FloorDivideAssign", updated_node: "FloorDivideAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_For( + self, original_node: "For", updated_node: "For" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_FormattedString( + self, original_node: "FormattedString", updated_node: "FormattedString" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_FormattedStringExpression( + self, + original_node: "FormattedStringExpression", + updated_node: "FormattedStringExpression", + ) -> Union[ + "BaseFormattedStringContent", + FlattenSentinel["BaseFormattedStringContent"], + RemovalSentinel, + ]: + return updated_node + + @mark_no_op + def leave_FormattedStringText( + self, original_node: "FormattedStringText", updated_node: "FormattedStringText" + ) -> Union[ + "BaseFormattedStringContent", + FlattenSentinel["BaseFormattedStringContent"], + RemovalSentinel, + ]: + return updated_node + + @mark_no_op + def leave_From(self, original_node: "From", updated_node: "From") -> "From": + return updated_node + + @mark_no_op + def leave_FunctionDef( + self, original_node: "FunctionDef", updated_node: "FunctionDef" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_GeneratorExp( + self, original_node: "GeneratorExp", updated_node: "GeneratorExp" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Global( + self, original_node: "Global", updated_node: "Global" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_GreaterThan( + self, original_node: "GreaterThan", updated_node: "GreaterThan" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def 
leave_GreaterThanEqual( + self, original_node: "GreaterThanEqual", updated_node: "GreaterThanEqual" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_If( + self, original_node: "If", updated_node: "If" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_IfExp( + self, original_node: "IfExp", updated_node: "IfExp" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Imaginary( + self, original_node: "Imaginary", updated_node: "Imaginary" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Import( + self, original_node: "Import", updated_node: "Import" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_ImportAlias( + self, original_node: "ImportAlias", updated_node: "ImportAlias" + ) -> Union["ImportAlias", FlattenSentinel["ImportAlias"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_ImportFrom( + self, original_node: "ImportFrom", updated_node: "ImportFrom" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_ImportStar( + self, original_node: "ImportStar", updated_node: "ImportStar" + ) -> "ImportStar": + return updated_node + + @mark_no_op + def leave_In(self, original_node: "In", updated_node: "In") -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_IndentedBlock( + self, original_node: "IndentedBlock", updated_node: "IndentedBlock" + ) -> "BaseSuite": + return updated_node + + @mark_no_op + def leave_Index(self, original_node: "Index", updated_node: "Index") -> "BaseSlice": + return updated_node + + @mark_no_op + def leave_Integer( + self, original_node: "Integer", updated_node: "Integer" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Is(self, original_node: "Is", updated_node: "Is") -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_IsNot( + self, original_node: "IsNot", updated_node: "IsNot" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_Lambda( + self, original_node: "Lambda", updated_node: "Lambda" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_LeftCurlyBrace( + self, original_node: "LeftCurlyBrace", updated_node: "LeftCurlyBrace" + ) -> "LeftCurlyBrace": + return updated_node + + @mark_no_op + def leave_LeftParen( + self, original_node: "LeftParen", updated_node: "LeftParen" + ) -> Union[ + "LeftParen", MaybeSentinel, FlattenSentinel["LeftParen"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_LeftShift( + self, original_node: "LeftShift", updated_node: "LeftShift" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_LeftShiftAssign( + self, original_node: "LeftShiftAssign", updated_node: "LeftShiftAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_LeftSquareBracket( + self, original_node: "LeftSquareBracket", updated_node: "LeftSquareBracket" + ) -> "LeftSquareBracket": + return updated_node + + @mark_no_op + def leave_LessThan( + self, original_node: "LessThan", updated_node: "LessThan" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_LessThanEqual( + self, original_node: "LessThanEqual", updated_node: "LessThanEqual" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_List( + self, original_node: "List", 
updated_node: "List" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_ListComp( + self, original_node: "ListComp", updated_node: "ListComp" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Match( + self, original_node: "Match", updated_node: "Match" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_MatchAs( + self, original_node: "MatchAs", updated_node: "MatchAs" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchCase( + self, original_node: "MatchCase", updated_node: "MatchCase" + ) -> "MatchCase": + return updated_node + + @mark_no_op + def leave_MatchClass( + self, original_node: "MatchClass", updated_node: "MatchClass" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchKeywordElement( + self, original_node: "MatchKeywordElement", updated_node: "MatchKeywordElement" + ) -> Union[ + "MatchKeywordElement", FlattenSentinel["MatchKeywordElement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_MatchList( + self, original_node: "MatchList", updated_node: "MatchList" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchMapping( + self, original_node: "MatchMapping", updated_node: "MatchMapping" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchMappingElement( + self, original_node: "MatchMappingElement", updated_node: "MatchMappingElement" + ) -> Union[ + "MatchMappingElement", FlattenSentinel["MatchMappingElement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_MatchOr( + self, original_node: "MatchOr", updated_node: "MatchOr" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchOrElement( + self, original_node: "MatchOrElement", updated_node: "MatchOrElement" + ) -> Union["MatchOrElement", FlattenSentinel["MatchOrElement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_MatchPattern( + self, original_node: "MatchPattern", updated_node: "MatchPattern" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchSequence( + self, original_node: "MatchSequence", updated_node: "MatchSequence" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchSequenceElement( + self, + original_node: "MatchSequenceElement", + updated_node: "MatchSequenceElement", + ) -> Union[ + "MatchSequenceElement", FlattenSentinel["MatchSequenceElement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_MatchSingleton( + self, original_node: "MatchSingleton", updated_node: "MatchSingleton" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchStar( + self, original_node: "MatchStar", updated_node: "MatchStar" + ) -> "MatchStar": + return updated_node + + @mark_no_op + def leave_MatchTuple( + self, original_node: "MatchTuple", updated_node: "MatchTuple" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatchValue( + self, original_node: "MatchValue", updated_node: "MatchValue" + ) -> "MatchPattern": + return updated_node + + @mark_no_op + def leave_MatrixMultiply( + self, original_node: "MatrixMultiply", updated_node: "MatrixMultiply" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_MatrixMultiplyAssign( + self, + original_node: "MatrixMultiplyAssign", + updated_node: "MatrixMultiplyAssign", + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def 
leave_Minus( + self, original_node: "Minus", updated_node: "Minus" + ) -> "BaseUnaryOp": + return updated_node + + @mark_no_op + def leave_Module(self, original_node: "Module", updated_node: "Module") -> "Module": + return updated_node + + @mark_no_op + def leave_Modulo( + self, original_node: "Modulo", updated_node: "Modulo" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_ModuloAssign( + self, original_node: "ModuloAssign", updated_node: "ModuloAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_Multiply( + self, original_node: "Multiply", updated_node: "Multiply" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_MultiplyAssign( + self, original_node: "MultiplyAssign", updated_node: "MultiplyAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_Name( + self, original_node: "Name", updated_node: "Name" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_NameItem( + self, original_node: "NameItem", updated_node: "NameItem" + ) -> Union["NameItem", FlattenSentinel["NameItem"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_NamedExpr( + self, original_node: "NamedExpr", updated_node: "NamedExpr" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Newline( + self, original_node: "Newline", updated_node: "Newline" + ) -> "Newline": + return updated_node + + @mark_no_op + def leave_Nonlocal( + self, original_node: "Nonlocal", updated_node: "Nonlocal" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Not(self, original_node: "Not", updated_node: "Not") -> "BaseUnaryOp": + return updated_node + + @mark_no_op + def leave_NotEqual( + self, original_node: "NotEqual", updated_node: "NotEqual" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_NotIn( + self, original_node: "NotIn", updated_node: "NotIn" + ) -> "BaseCompOp": + return updated_node + + @mark_no_op + def leave_Or(self, original_node: "Or", updated_node: "Or") -> "BaseBooleanOp": + return updated_node + + @mark_no_op + def leave_Param( + self, original_node: "Param", updated_node: "Param" + ) -> Union["Param", MaybeSentinel, FlattenSentinel["Param"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_ParamSlash( + self, original_node: "ParamSlash", updated_node: "ParamSlash" + ) -> Union["ParamSlash", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_ParamSpec( + self, original_node: "ParamSpec", updated_node: "ParamSpec" + ) -> "ParamSpec": + return updated_node + + @mark_no_op + def leave_ParamStar( + self, original_node: "ParamStar", updated_node: "ParamStar" + ) -> Union["ParamStar", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_Parameters( + self, original_node: "Parameters", updated_node: "Parameters" + ) -> "Parameters": + return updated_node + + @mark_no_op + def leave_ParenthesizedWhitespace( + self, + original_node: "ParenthesizedWhitespace", + updated_node: "ParenthesizedWhitespace", + ) -> Union["BaseParenthesizableWhitespace", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_Pass( + self, original_node: "Pass", updated_node: "Pass" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Plus(self, original_node: "Plus", updated_node: "Plus") -> "BaseUnaryOp": + return updated_node + + 
@mark_no_op + def leave_Power( + self, original_node: "Power", updated_node: "Power" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_PowerAssign( + self, original_node: "PowerAssign", updated_node: "PowerAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_Raise( + self, original_node: "Raise", updated_node: "Raise" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Return( + self, original_node: "Return", updated_node: "Return" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_RightCurlyBrace( + self, original_node: "RightCurlyBrace", updated_node: "RightCurlyBrace" + ) -> "RightCurlyBrace": + return updated_node + + @mark_no_op + def leave_RightParen( + self, original_node: "RightParen", updated_node: "RightParen" + ) -> Union[ + "RightParen", MaybeSentinel, FlattenSentinel["RightParen"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_RightShift( + self, original_node: "RightShift", updated_node: "RightShift" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_RightShiftAssign( + self, original_node: "RightShiftAssign", updated_node: "RightShiftAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_RightSquareBracket( + self, original_node: "RightSquareBracket", updated_node: "RightSquareBracket" + ) -> "RightSquareBracket": + return updated_node + + @mark_no_op + def leave_Semicolon( + self, original_node: "Semicolon", updated_node: "Semicolon" + ) -> Union["Semicolon", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_Set(self, original_node: "Set", updated_node: "Set") -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_SetComp( + self, original_node: "SetComp", updated_node: "SetComp" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_SimpleStatementLine( + self, original_node: "SimpleStatementLine", updated_node: "SimpleStatementLine" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_SimpleStatementSuite( + self, + original_node: "SimpleStatementSuite", + updated_node: "SimpleStatementSuite", + ) -> "BaseSuite": + return updated_node + + @mark_no_op + def leave_SimpleString( + self, original_node: "SimpleString", updated_node: "SimpleString" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_SimpleWhitespace( + self, original_node: "SimpleWhitespace", updated_node: "SimpleWhitespace" + ) -> Union["BaseParenthesizableWhitespace", MaybeSentinel]: + return updated_node + + @mark_no_op + def leave_Slice(self, original_node: "Slice", updated_node: "Slice") -> "BaseSlice": + return updated_node + + @mark_no_op + def leave_StarredDictElement( + self, original_node: "StarredDictElement", updated_node: "StarredDictElement" + ) -> Union["BaseDictElement", FlattenSentinel["BaseDictElement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_StarredElement( + self, original_node: "StarredElement", updated_node: "StarredElement" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_Subscript( + self, original_node: "Subscript", updated_node: "Subscript" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_SubscriptElement( + self, original_node: "SubscriptElement", 
updated_node: "SubscriptElement" + ) -> Union[ + "SubscriptElement", FlattenSentinel["SubscriptElement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_Subtract( + self, original_node: "Subtract", updated_node: "Subtract" + ) -> "BaseBinaryOp": + return updated_node + + @mark_no_op + def leave_SubtractAssign( + self, original_node: "SubtractAssign", updated_node: "SubtractAssign" + ) -> "BaseAugOp": + return updated_node + + @mark_no_op + def leave_TrailingWhitespace( + self, original_node: "TrailingWhitespace", updated_node: "TrailingWhitespace" + ) -> "TrailingWhitespace": + return updated_node + + @mark_no_op + def leave_Try( + self, original_node: "Try", updated_node: "Try" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_TryStar( + self, original_node: "TryStar", updated_node: "TryStar" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Tuple( + self, original_node: "Tuple", updated_node: "Tuple" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_TypeAlias( + self, original_node: "TypeAlias", updated_node: "TypeAlias" + ) -> Union[ + "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel + ]: + return updated_node + + @mark_no_op + def leave_TypeParam( + self, original_node: "TypeParam", updated_node: "TypeParam" + ) -> Union["TypeParam", FlattenSentinel["TypeParam"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_TypeParameters( + self, original_node: "TypeParameters", updated_node: "TypeParameters" + ) -> "TypeParameters": + return updated_node + + @mark_no_op + def leave_TypeVar( + self, original_node: "TypeVar", updated_node: "TypeVar" + ) -> "TypeVar": + return updated_node + + @mark_no_op + def leave_TypeVarTuple( + self, original_node: "TypeVarTuple", updated_node: "TypeVarTuple" + ) -> "TypeVarTuple": + return updated_node + + @mark_no_op + def leave_UnaryOperation( + self, original_node: "UnaryOperation", updated_node: "UnaryOperation" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_While( + self, original_node: "While", updated_node: "While" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_With( + self, original_node: "With", updated_node: "With" + ) -> Union["BaseStatement", FlattenSentinel["BaseStatement"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_WithItem( + self, original_node: "WithItem", updated_node: "WithItem" + ) -> Union["WithItem", FlattenSentinel["WithItem"], RemovalSentinel]: + return updated_node + + @mark_no_op + def leave_Yield( + self, original_node: "Yield", updated_node: "Yield" + ) -> "BaseExpression": + return updated_node diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index e9698462..57e1b4c9 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -1,16259 +1,16303 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
- - -# This file was generated by libcst.codegen.gen_matcher_classes -from dataclasses import dataclass -from typing import Literal, Optional, Sequence, Union - -import libcst as cst -from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit - -from libcst.matchers._matcher_base import ( - AbstractBaseMatcherNodeMeta, - AllOf, - AtLeastN, - AtMostN, - BaseMatcherNode, - DoesNotMatch, - DoNotCare, - DoNotCareSentinel, - extract, - extractall, - findall, - matches, - MatchIfTrue, - MatchMetadata, - MatchMetadataIfTrue, - MatchRegex, - OneOf, - replace, - SaveMatchedNode, - TypeOf, - ZeroOrMore, - ZeroOrOne, -) -from libcst.matchers._visitors import ( - MatchDecoratorMismatch, - MatcherDecoratableTransformer, - MatcherDecoratableVisitor, -) - - -class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta): - __slots__ = () - - -class BaseAssignTargetExpression(_NodeABC): - pass - - -class BaseAugOp(_NodeABC): - pass - - -class BaseBinaryOp(_NodeABC): - pass - - -class BaseBooleanOp(_NodeABC): - pass - - -class BaseComp(_NodeABC): - pass - - -class BaseCompOp(_NodeABC): - pass - - -class BaseCompoundStatement(_NodeABC): - pass - - -class BaseDelTargetExpression(_NodeABC): - pass - - -class BaseDict(_NodeABC): - pass - - -class BaseDictElement(_NodeABC): - pass - - -class BaseElement(_NodeABC): - pass - - -class BaseExpression(_NodeABC): - pass - - -class BaseFormattedStringContent(_NodeABC): - pass - - -class BaseList(_NodeABC): - pass - - -class BaseMetadataProvider(_NodeABC): - pass - - -class BaseNumber(_NodeABC): - pass - - -class BaseParenthesizableWhitespace(_NodeABC): - pass - - -class BaseSet(_NodeABC): - pass - - -class BaseSimpleComp(_NodeABC): - pass - - -class BaseSlice(_NodeABC): - pass - - -class BaseSmallStatement(_NodeABC): - pass - - -class BaseStatement(_NodeABC): - pass - - -class BaseString(_NodeABC): - pass - - -class BaseSuite(_NodeABC): - pass - - -class BaseUnaryOp(_NodeABC): - pass - - -MetadataMatchType = Union[MatchMetadata, MatchMetadataIfTrue] - - -BaseParenthesizableWhitespaceMatchType = Union[ - "BaseParenthesizableWhitespace", - MetadataMatchType, - MatchIfTrue[cst.BaseParenthesizableWhitespace], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Add(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AddAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class 
And(BaseBooleanOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseAssignTargetExpressionMatchType = Union[ - "BaseAssignTargetExpression", - MetadataMatchType, - MatchIfTrue[cst.BaseAssignTargetExpression], -] -AnnotationMatchType = Union[ - "Annotation", MetadataMatchType, MatchIfTrue[cst.Annotation] -] -AssignEqualMatchType = Union[ - "AssignEqual", MetadataMatchType, MatchIfTrue[cst.AssignEqual] -] -SemicolonMatchType = Union["Semicolon", MetadataMatchType, MatchIfTrue[cst.Semicolon]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AnnAssign(BaseSmallStatement, BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - annotation: Union[ - AnnotationMatchType, - DoNotCareSentinel, - OneOf[AnnotationMatchType], - AllOf[AnnotationMatchType], - ] = DoNotCare() - value: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - equal: Union[ - AssignEqualMatchType, - DoNotCareSentinel, - OneOf[AssignEqualMatchType], - AllOf[AssignEqualMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseExpressionMatchType = Union[ - "BaseExpression", MetadataMatchType, MatchIfTrue[cst.BaseExpression] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Annotation(BaseMatcherNode): - annotation: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - whitespace_before_indicator: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_indicator: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -CommaMatchType = Union["Comma", MetadataMatchType, MatchIfTrue[cst.Comma]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Arg(BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - keyword: Union[ - 
Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - equal: Union[ - AssignEqualMatchType, - DoNotCareSentinel, - OneOf[AssignEqualMatchType], - AllOf[AssignEqualMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - star: Union[ - Literal["", "*", "**"], - MetadataMatchType, - MatchIfTrue[Literal["", "*", "**"]], - DoNotCareSentinel, - OneOf[ - Union[ - Literal["", "*", "**"], - MetadataMatchType, - MatchIfTrue[Literal["", "*", "**"]], - ] - ], - AllOf[ - Union[ - Literal["", "*", "**"], - MetadataMatchType, - MatchIfTrue[Literal["", "*", "**"]], - ] - ], - ] = DoNotCare() - whitespace_after_star: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_arg: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -NameOrTupleOrListMatchType = Union[ - "Name", - "Tuple", - "List", - MetadataMatchType, - MatchIfTrue[Union[cst.Name, cst.Tuple, cst.List]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AsName(BaseMatcherNode): - name: Union[ - NameOrTupleOrListMatchType, - DoNotCareSentinel, - OneOf[NameOrTupleOrListMatchType], - AllOf[NameOrTupleOrListMatchType], - ] = DoNotCare() - whitespace_before_as: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_as: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SimpleWhitespaceMatchType = Union[ - "SimpleWhitespace", MetadataMatchType, MatchIfTrue[cst.SimpleWhitespace] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Assert(BaseSmallStatement, BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - msg: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_after_assert: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - 
OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -AssignTargetMatchType = Union[ - "AssignTarget", MetadataMatchType, MatchIfTrue[cst.AssignTarget] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Assign(BaseSmallStatement, BaseMatcherNode): - targets: Union[ - Sequence[ - Union[ - AssignTargetMatchType, - DoNotCareSentinel, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - AtLeastN[ - Union[ - AssignTargetMatchType, - DoNotCareSentinel, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - ] - ], - AtMostN[ - Union[ - AssignTargetMatchType, - DoNotCareSentinel, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.AssignTarget]], - OneOf[ - Union[ - Sequence[ - Union[ - AssignTargetMatchType, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - AtLeastN[ - Union[ - AssignTargetMatchType, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - ] - ], - AtMostN[ - Union[ - AssignTargetMatchType, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.AssignTarget]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - AssignTargetMatchType, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - AtLeastN[ - Union[ - AssignTargetMatchType, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - ] - ], - AtMostN[ - Union[ - AssignTargetMatchType, - OneOf[AssignTargetMatchType], - AllOf[AssignTargetMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.AssignTarget]], - ] - ], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AssignEqual(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AssignTarget(BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - whitespace_before_equal: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_equal: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, 
- OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Asynchronous(BaseMatcherNode): - whitespace_after: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -NameMatchType = Union["Name", MetadataMatchType, MatchIfTrue[cst.Name]] -DotMatchType = Union["Dot", MetadataMatchType, MatchIfTrue[cst.Dot]] -LeftParenMatchType = Union["LeftParen", MetadataMatchType, MatchIfTrue[cst.LeftParen]] -RightParenMatchType = Union[ - "RightParen", MetadataMatchType, MatchIfTrue[cst.RightParen] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Attribute( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - attr: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - dot: Union[ - DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - 
RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseAugOpMatchType = Union["BaseAugOp", MetadataMatchType, MatchIfTrue[cst.BaseAugOp]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class AugAssign(BaseSmallStatement, BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - operator: Union[ - BaseAugOpMatchType, - DoNotCareSentinel, - OneOf[BaseAugOpMatchType], - AllOf[BaseAugOpMatchType], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Await(BaseExpression, BaseMatcherNode): - expression: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - 
OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_await: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseBinaryOpMatchType = Union[ - "BaseBinaryOp", MetadataMatchType, MatchIfTrue[cst.BaseBinaryOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BinaryOperation(BaseExpression, BaseMatcherNode): - left: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - operator: Union[ - BaseBinaryOpMatchType, - DoNotCareSentinel, - OneOf[BaseBinaryOpMatchType], - AllOf[BaseBinaryOpMatchType], - ] = DoNotCare() - right: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ 
- Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitAnd(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitAndAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitInvert(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitOr(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitOrAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - 
OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitXor(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BitXorAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseBooleanOpMatchType = Union[ - "BaseBooleanOp", MetadataMatchType, MatchIfTrue[cst.BaseBooleanOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class BooleanOperation(BaseExpression, BaseMatcherNode): - left: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - operator: Union[ - BaseBooleanOpMatchType, - DoNotCareSentinel, - OneOf[BaseBooleanOpMatchType], - AllOf[BaseBooleanOpMatchType], - ] = DoNotCare() - right: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - 
LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Break(BaseSmallStatement, BaseMatcherNode): - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ArgMatchType = Union["Arg", MetadataMatchType, MatchIfTrue[cst.Arg]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Call(BaseExpression, BaseMatcherNode): - func: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - args: Union[ - Sequence[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - AtMostN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Arg]], - OneOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - 
LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_func: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_args: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseSuiteMatchType = Union["BaseSuite", MetadataMatchType, MatchIfTrue[cst.BaseSuite]] -DecoratorMatchType = Union["Decorator", MetadataMatchType, MatchIfTrue[cst.Decorator]] -EmptyLineMatchType = Union["EmptyLine", MetadataMatchType, MatchIfTrue[cst.EmptyLine]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - 
OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - bases: Union[ - Sequence[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - AtMostN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Arg]], - OneOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - ] = DoNotCare() - keywords: Union[ - Sequence[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - AtMostN[ - Union[ - ArgMatchType, - DoNotCareSentinel, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Arg]], - OneOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ArgMatchType, - OneOf[ArgMatchType], - AllOf[ArgMatchType], - AtLeastN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - AtMostN[ - Union[ - ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Arg]], - ] - ], - ] = DoNotCare() - decorators: Union[ - Sequence[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Decorator]], - OneOf[ - Union[ - Sequence[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Decorator]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Decorator]], - ] - ], - ] = DoNotCare() - lpar: Union[ - 
LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] = DoNotCare() - rpar: Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - lines_after_decorators: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_class: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_name: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - type_parameters: Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["TypeParameters"], - 
MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - AllOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - ] = DoNotCare() - whitespace_after_type_parameters: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Colon(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Comma(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -strMatchType = Union[str, MetadataMatchType, MatchIfTrue[str]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Comment(BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -CompIfMatchType = Union["CompIf", MetadataMatchType, MatchIfTrue[cst.CompIf]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class CompFor(BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - iter: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - ifs: Union[ - Sequence[ - Union[ - CompIfMatchType, - DoNotCareSentinel, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - AtLeastN[ - Union[ - CompIfMatchType, - DoNotCareSentinel, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - AtMostN[ - Union[ - CompIfMatchType, - DoNotCareSentinel, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.CompIf]], - OneOf[ - Union[ - Sequence[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - AtLeastN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - AtMostN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.CompIf]], - ] - ], - AllOf[ - Union[ - Sequence[ - 
Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - AtLeastN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - AtMostN[ - Union[ - CompIfMatchType, - OneOf[CompIfMatchType], - AllOf[CompIfMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.CompIf]], - ] - ], - ] = DoNotCare() - inner_for_in: Union[ - Optional["CompFor"], - MetadataMatchType, - MatchIfTrue[Optional[cst.CompFor]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["CompFor"], - MetadataMatchType, - MatchIfTrue[Optional[cst.CompFor]], - ] - ], - AllOf[ - Union[ - Optional["CompFor"], - MetadataMatchType, - MatchIfTrue[Optional[cst.CompFor]], - ] - ], - ] = DoNotCare() - asynchronous: Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - AllOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - ] = DoNotCare() - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_for: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_in: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_in: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class CompIf(BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_test: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ComparisonTargetMatchType = Union[ - "ComparisonTarget", MetadataMatchType, MatchIfTrue[cst.ComparisonTarget] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Comparison(BaseExpression, BaseMatcherNode): - left: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comparisons: Union[ - Sequence[ - Union[ - ComparisonTargetMatchType, - DoNotCareSentinel, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - AtLeastN[ - Union[ - ComparisonTargetMatchType, - DoNotCareSentinel, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], 
- ] - ], - AtMostN[ - Union[ - ComparisonTargetMatchType, - DoNotCareSentinel, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ComparisonTarget]], - OneOf[ - Union[ - Sequence[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - AtLeastN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - AtMostN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ComparisonTarget]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - AtLeastN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - AtMostN[ - Union[ - ComparisonTargetMatchType, - OneOf[ComparisonTargetMatchType], - AllOf[ComparisonTargetMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ComparisonTarget]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseCompOpMatchType = Union[ - "BaseCompOp", MetadataMatchType, MatchIfTrue[cst.BaseCompOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ComparisonTarget(BaseMatcherNode): - operator: Union[ - BaseCompOpMatchType, - DoNotCareSentinel, - OneOf[BaseCompOpMatchType], - AllOf[BaseCompOpMatchType], - ] = DoNotCare() - comparator: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SimpleStringOrFormattedStringMatchType = Union[ - "SimpleString", - "FormattedString", - MetadataMatchType, - MatchIfTrue[Union[cst.SimpleString, cst.FormattedString]], -] -SimpleStringOrFormattedStringOrConcatenatedStringMatchType = Union[ - "SimpleString", - "FormattedString", - "ConcatenatedString", - MetadataMatchType, - MatchIfTrue[Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): - left: Union[ - SimpleStringOrFormattedStringMatchType, - DoNotCareSentinel, - OneOf[SimpleStringOrFormattedStringMatchType], - AllOf[SimpleStringOrFormattedStringMatchType], - ] = DoNotCare() - right: Union[ - SimpleStringOrFormattedStringOrConcatenatedStringMatchType, - DoNotCareSentinel, - OneOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], - AllOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - 
Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_between: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Continue(BaseSmallStatement, BaseMatcherNode): - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -TrailingWhitespaceMatchType = Union[ - "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Decorator(BaseMatcherNode): - decorator: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_at: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - 
AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - trailing_whitespace: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseDelTargetExpressionMatchType = Union[ - "BaseDelTargetExpression", - MetadataMatchType, - MatchIfTrue[cst.BaseDelTargetExpression], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Del(BaseSmallStatement, BaseMatcherNode): - target: Union[ - BaseDelTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseDelTargetExpressionMatchType], - AllOf[BaseDelTargetExpressionMatchType], - ] = DoNotCare() - whitespace_after_del: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseDictElementMatchType = Union[ - "BaseDictElement", MetadataMatchType, MatchIfTrue[cst.BaseDictElement] -] -LeftCurlyBraceMatchType = Union[ - "LeftCurlyBrace", MetadataMatchType, MatchIfTrue[cst.LeftCurlyBrace] -] -RightCurlyBraceMatchType = Union[ - "RightCurlyBrace", MetadataMatchType, MatchIfTrue[cst.RightCurlyBrace] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Dict(BaseDict, BaseExpression, BaseMatcherNode): - elements: Union[ - Sequence[ - Union[ - BaseDictElementMatchType, - DoNotCareSentinel, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - AtLeastN[ - Union[ - BaseDictElementMatchType, - DoNotCareSentinel, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseDictElementMatchType, - DoNotCareSentinel, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseDictElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - AtLeastN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseDictElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - AtLeastN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseDictElementMatchType, - OneOf[BaseDictElementMatchType], - AllOf[BaseDictElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseDictElement]], - ] - ], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -CompForMatchType = Union["CompFor", MetadataMatchType, MatchIfTrue[cst.CompFor]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): - key: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - for_in: Union[ - CompForMatchType, - DoNotCareSentinel, - OneOf[CompForMatchType], - AllOf[CompForMatchType], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - 
AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class DictElement(BaseDictElement, BaseMatcherNode): - key: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], 
- AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Divide(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class DivideAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Dot(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Element(BaseElement, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Ellipsis(BaseExpression, BaseMatcherNode): - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - 
AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Else(BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ 
- Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[bool]] -NewlineMatchType = Union["Newline", MetadataMatchType, MatchIfTrue[cst.Newline]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class EmptyLine(BaseMatcherNode): - indent: Union[ - boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] - ] = DoNotCare() - whitespace: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - comment: Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - ] - ], - AllOf[ - Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - ] - ], - ] = DoNotCare() - newline: Union[ - NewlineMatchType, - DoNotCareSentinel, - OneOf[NewlineMatchType], - AllOf[NewlineMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Equal(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ExceptHandler(BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - type: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - name: Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AsName]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - AllOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - 
DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_except: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ExceptStarHandler(BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - type: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - name: Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AsName]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - AllOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - 
EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_except: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_star: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Expr(BaseSmallStatement, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Finally(BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Float(BaseExpression, BaseNumber, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - 
Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class FloorDivide(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class FloorDivideAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - 
whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - target: Union[ - BaseAssignTargetExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseAssignTargetExpressionMatchType], - AllOf[BaseAssignTargetExpressionMatchType], - ] = DoNotCare() - iter: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - asynchronous: Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - AllOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_for: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_in: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_in: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - 
OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseFormattedStringContentMatchType = Union[ - "BaseFormattedStringContent", - MetadataMatchType, - MatchIfTrue[cst.BaseFormattedStringContent], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class FormattedString(BaseExpression, BaseString, BaseMatcherNode): - parts: Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - AtLeastN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseFormattedStringContentMatchType, - OneOf[BaseFormattedStringContentMatchType], - AllOf[BaseFormattedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], - ] - ], - ] = DoNotCare() - start: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - end: Union[ - Literal['"', "'", '"""', "'''"], - MetadataMatchType, - MatchIfTrue[Literal['"', "'", '"""', "'''"]], - DoNotCareSentinel, - OneOf[ - Union[ - Literal['"', "'", '"""', "'''"], - MetadataMatchType, - MatchIfTrue[Literal['"', "'", '"""', "'''"]], - ] - ], - AllOf[ - Union[ - Literal['"', "'", '"""', "'''"], - MetadataMatchType, - MatchIfTrue[Literal['"', "'", '"""', "'''"]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - 
LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): - expression: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - conversion: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - format_spec: Union[ - Optional[Sequence["BaseFormattedStringContent"]], - MetadataMatchType, - MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional[Sequence["BaseFormattedStringContent"]], - MetadataMatchType, - MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], - ] - ], - AllOf[ - Union[ - Optional[Sequence["BaseFormattedStringContent"]], - MetadataMatchType, - MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], - ] - ], - ] = DoNotCare() - whitespace_before_expression: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_expression: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - 
equal: Union[ - Optional["AssignEqual"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AssignEqual]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AssignEqual"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AssignEqual]], - ] - ], - AllOf[ - Union[ - Optional["AssignEqual"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AssignEqual]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class FormattedStringText(BaseFormattedStringContent, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class From(BaseMatcherNode): - item: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - whitespace_before_from: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_from: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ParametersMatchType = Union[ - "Parameters", MetadataMatchType, MatchIfTrue[cst.Parameters] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - params: Union[ - ParametersMatchType, - DoNotCareSentinel, - OneOf[ParametersMatchType], - AllOf[ParametersMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - decorators: Union[ - Sequence[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - DoNotCareSentinel, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Decorator]], - OneOf[ - Union[ - Sequence[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Decorator]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - AtLeastN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - AtMostN[ - Union[ - DecoratorMatchType, - OneOf[DecoratorMatchType], - AllOf[DecoratorMatchType], - ] - ], - ] - ], - 
MatchIfTrue[Sequence[cst.Decorator]], - ] - ], - ] = DoNotCare() - returns: Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - ] - ], - AllOf[ - Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - ] - ], - ] = DoNotCare() - asynchronous: Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - AllOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - lines_after_decorators: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_def: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - 
AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_name: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_params: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - type_parameters: Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - AllOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - ] = DoNotCare() - whitespace_after_type_parameters: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): - elt: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - for_in: Union[ - CompForMatchType, - DoNotCareSentinel, - OneOf[CompForMatchType], - AllOf[CompForMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - 
RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -NameItemMatchType = Union["NameItem", MetadataMatchType, MatchIfTrue[cst.NameItem]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Global(BaseSmallStatement, BaseMatcherNode): - names: Union[ - Sequence[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.NameItem]], - OneOf[ - Union[ - Sequence[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.NameItem]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.NameItem]], - ] - ], - ] = DoNotCare() - whitespace_after_global: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class GreaterThan(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class 
GreaterThanEqual(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -IfOrElseOrNoneMatchType = Union[ - "If", "Else", None, MetadataMatchType, MatchIfTrue[Union[cst.If, cst.Else, None]] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - orelse: Union[ - IfOrElseOrNoneMatchType, - DoNotCareSentinel, - OneOf[IfOrElseOrNoneMatchType], - AllOf[IfOrElseOrNoneMatchType], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_test: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_test: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class IfExp(BaseExpression, BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - body: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - orelse: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - 
AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_before_if: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_if: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_else: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_else: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - 
OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ImportAliasMatchType = Union[ - "ImportAlias", MetadataMatchType, MatchIfTrue[cst.ImportAlias] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Import(BaseSmallStatement, BaseMatcherNode): - names: Union[ - Sequence[ - Union[ - ImportAliasMatchType, - DoNotCareSentinel, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - DoNotCareSentinel, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - 
ImportAliasMatchType, - DoNotCareSentinel, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ImportAlias]], - OneOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - whitespace_after_import: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -AttributeOrNameMatchType = Union[ - "Attribute", "Name", MetadataMatchType, MatchIfTrue[Union[cst.Attribute, cst.Name]] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ImportAlias(BaseMatcherNode): - name: Union[ - AttributeOrNameMatchType, - DoNotCareSentinel, - OneOf[AttributeOrNameMatchType], - AllOf[AttributeOrNameMatchType], - ] = DoNotCare() - asname: Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AsName]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - AllOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -AttributeOrNameOrNoneMatchType = Union[ - "Attribute", - "Name", - None, - MetadataMatchType, - MatchIfTrue[Union[cst.Attribute, cst.Name, None]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ImportFrom(BaseSmallStatement, BaseMatcherNode): - module: Union[ - AttributeOrNameOrNoneMatchType, - DoNotCareSentinel, - OneOf[AttributeOrNameOrNoneMatchType], - AllOf[AttributeOrNameOrNoneMatchType], - ] = DoNotCare() - names: Union[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - DoNotCareSentinel, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - DoNotCareSentinel, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - DoNotCareSentinel, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ImportAlias]], - OneOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - 
ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - ], - "ImportStar", - MetadataMatchType, - MatchIfTrue[ - Union[ - Sequence[cst.ImportAlias], - cst.ImportStar, - OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - ] - ], - DoNotCareSentinel, - OneOf[ - Union[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - OneOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - ], - "ImportStar", - MetadataMatchType, - MatchIfTrue[ - Union[ - Sequence[cst.ImportAlias], - cst.ImportStar, - OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - ] - ], - ] - ], - AllOf[ - Union[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - OneOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - AtLeastN[ - Union[ - ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - AtMostN[ - Union[ - 
ImportAliasMatchType, - OneOf[ImportAliasMatchType], - AllOf[ImportAliasMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ImportAlias]], - ] - ], - ], - "ImportStar", - MetadataMatchType, - MatchIfTrue[ - Union[ - Sequence[cst.ImportAlias], - cst.ImportStar, - OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], - ] - ], - ] - ], - ] = DoNotCare() - relative: Union[ - Sequence[ - Union[ - DotMatchType, - DoNotCareSentinel, - OneOf[DotMatchType], - AllOf[DotMatchType], - AtLeastN[ - Union[ - DotMatchType, - DoNotCareSentinel, - OneOf[DotMatchType], - AllOf[DotMatchType], - ] - ], - AtMostN[ - Union[ - DotMatchType, - DoNotCareSentinel, - OneOf[DotMatchType], - AllOf[DotMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Dot]], - OneOf[ - Union[ - Sequence[ - Union[ - DotMatchType, - OneOf[DotMatchType], - AllOf[DotMatchType], - AtLeastN[ - Union[ - DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] - ] - ], - AtMostN[ - Union[ - DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Dot]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - DotMatchType, - OneOf[DotMatchType], - AllOf[DotMatchType], - AtLeastN[ - Union[ - DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] - ] - ], - AtMostN[ - Union[ - DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Dot]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Optional["LeftParen"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftParen]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["LeftParen"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Optional["LeftParen"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Optional["RightParen"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightParen]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["RightParen"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Optional["RightParen"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightParen]], - ] - ], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - whitespace_after_from: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_import: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_import: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ImportStar(BaseMatcherNode): - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class In(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] 
= DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseStatementMatchType = Union[ - "BaseStatement", MetadataMatchType, MatchIfTrue[cst.BaseStatement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class IndentedBlock(BaseSuite, BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - BaseStatementMatchType, - DoNotCareSentinel, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - AtLeastN[ - Union[ - BaseStatementMatchType, - DoNotCareSentinel, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseStatementMatchType, - DoNotCareSentinel, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseStatement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - AtLeastN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseStatement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - AtLeastN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseStatementMatchType, - OneOf[BaseStatementMatchType], - AllOf[BaseStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseStatement]], - ] - ], - ] = DoNotCare() - header: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - indent: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - footer: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - 
OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Index(BaseSlice, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - star: Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - ] - ], - AllOf[ - Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - ] - ], - ] = DoNotCare() - whitespace_after_star: Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - ] - ], - AllOf[ - Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Integer(BaseExpression, BaseNumber, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ 
- RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Is(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class IsNot(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_between: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ColonMatchType = Union["Colon", MetadataMatchType, MatchIfTrue[cst.Colon]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Lambda(BaseExpression, BaseMatcherNode): - params: Union[ - ParametersMatchType, - DoNotCareSentinel, - OneOf[ParametersMatchType], - AllOf[ParametersMatchType], - ] = DoNotCare() - body: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_lambda: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftCurlyBrace(BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftParen(BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftShift(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - 
AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftShiftAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LeftSquareBracket(BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LessThan(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class LessThanEqual(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseElementMatchType = Union[ - "BaseElement", MetadataMatchType, MatchIfTrue[cst.BaseElement] -] -LeftSquareBracketMatchType = Union[ - "LeftSquareBracket", MetadataMatchType, MatchIfTrue[cst.LeftSquareBracket] -] -RightSquareBracketMatchType = Union[ - "RightSquareBracket", MetadataMatchType, MatchIfTrue[cst.RightSquareBracket] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class List( - BaseAssignTargetExpression, - BaseDelTargetExpression, - BaseExpression, - BaseList, - BaseMatcherNode, -): - elements: Union[ - Sequence[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - 
OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - 
], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNode): - elt: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - for_in: Union[ - CompForMatchType, - DoNotCareSentinel, - OneOf[CompForMatchType], - AllOf[CompForMatchType], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - 
Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchCaseMatchType = Union["MatchCase", MetadataMatchType, MatchIfTrue[cst.MatchCase]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Match(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - subject: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - cases: Union[ - Sequence[ - Union[ - MatchCaseMatchType, - DoNotCareSentinel, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - AtLeastN[ - Union[ - MatchCaseMatchType, - DoNotCareSentinel, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - AtMostN[ - Union[ - MatchCaseMatchType, - DoNotCareSentinel, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchCase]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - AtLeastN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - AtMostN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchCase]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - AtLeastN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - AtMostN[ - Union[ - MatchCaseMatchType, - OneOf[MatchCaseMatchType], - AllOf[MatchCaseMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchCase]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_match: Union[ - SimpleWhitespaceMatchType, - 
DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_colon: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - indent: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - footer: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchAs(BaseMatcherNode): - pattern: Union[ - Optional["MatchPattern"], - MetadataMatchType, - MatchIfTrue[Optional[cst.MatchPattern]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["MatchPattern"], - MetadataMatchType, - MatchIfTrue[Optional[cst.MatchPattern]], - ] - ], - AllOf[ - Union[ - Optional["MatchPattern"], - MetadataMatchType, - MatchIfTrue[Optional[cst.MatchPattern]], - ] - ], - ] = DoNotCare() - name: Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - whitespace_before_as: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_as: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - 
DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchPatternMatchType = Union[ - "MatchPattern", MetadataMatchType, MatchIfTrue[cst.MatchPattern] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchCase(BaseMatcherNode): - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - guard: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - 
AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_case: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_if: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_if: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchSequenceElementMatchType = Union[ - "MatchSequenceElement", MetadataMatchType, MatchIfTrue[cst.MatchSequenceElement] -] -MatchKeywordElementMatchType = Union[ - "MatchKeywordElement", MetadataMatchType, MatchIfTrue[cst.MatchKeywordElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchClass(BaseMatcherNode): - cls: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - patterns: Union[ - Sequence[ - Union[ - MatchSequenceElementMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - AtLeastN[ - Union[ - MatchSequenceElementMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchSequenceElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - AtLeastN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - ] - ], - 
MatchIfTrue[Sequence[cst.MatchSequenceElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - AtLeastN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementMatchType, - OneOf[MatchSequenceElementMatchType], - AllOf[MatchSequenceElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchSequenceElement]], - ] - ], - ] = DoNotCare() - kwds: Union[ - Sequence[ - Union[ - MatchKeywordElementMatchType, - DoNotCareSentinel, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - AtLeastN[ - Union[ - MatchKeywordElementMatchType, - DoNotCareSentinel, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchKeywordElementMatchType, - DoNotCareSentinel, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchKeywordElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - AtLeastN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchKeywordElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - AtLeastN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchKeywordElementMatchType, - OneOf[MatchKeywordElementMatchType], - AllOf[MatchKeywordElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchKeywordElement]], - ] - ], - ] = DoNotCare() - whitespace_after_cls: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_patterns: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_kwds: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchKeywordElement(BaseMatcherNode): - key: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_equal: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_equal: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchSequenceElementOrMatchStarMatchType = Union[ - "MatchSequenceElement", - "MatchStar", - MetadataMatchType, - MatchIfTrue[Union[cst.MatchSequenceElement, cst.MatchStar]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchList(BaseMatcherNode): - patterns: Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - 
MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - OneOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - ] = DoNotCare() - lbracket: Union[ - Optional["LeftSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftSquareBracket]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["LeftSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftSquareBracket]], - ] - ], - AllOf[ - Union[ - Optional["LeftSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.LeftSquareBracket]], - ] - ], - ] = DoNotCare() - rbracket: Union[ - Optional["RightSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightSquareBracket]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["RightSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightSquareBracket]], - ] - ], - AllOf[ - Union[ - Optional["RightSquareBracket"], - MetadataMatchType, - MatchIfTrue[Optional[cst.RightSquareBracket]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - 
AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchMappingElementMatchType = Union[ - "MatchMappingElement", MetadataMatchType, MatchIfTrue[cst.MatchMappingElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchMapping(BaseMatcherNode): - elements: Union[ - Sequence[ - Union[ - MatchMappingElementMatchType, - DoNotCareSentinel, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - AtLeastN[ - Union[ - MatchMappingElementMatchType, - DoNotCareSentinel, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchMappingElementMatchType, - DoNotCareSentinel, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchMappingElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - AtLeastN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchMappingElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchMappingElementMatchType, - 
OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - AtLeastN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchMappingElementMatchType, - OneOf[MatchMappingElementMatchType], - AllOf[MatchMappingElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchMappingElement]], - ] - ], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - rest: Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - whitespace_before_rest: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - trailing_comma: Union[ - Optional["Comma"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comma]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] - ] - ], - AllOf[ - Union[ - Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchMappingElement(BaseMatcherNode): - key: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_colon: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -MatchOrElementMatchType = Union[ - "MatchOrElement", MetadataMatchType, MatchIfTrue[cst.MatchOrElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchOr(BaseMatcherNode): - patterns: Union[ - Sequence[ - Union[ - MatchOrElementMatchType, - DoNotCareSentinel, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - AtLeastN[ - Union[ - MatchOrElementMatchType, - DoNotCareSentinel, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchOrElementMatchType, - DoNotCareSentinel, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.MatchOrElement]], - OneOf[ - Union[ - Sequence[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - AtLeastN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchOrElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - AtLeastN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - AtMostN[ - Union[ - MatchOrElementMatchType, - OneOf[MatchOrElementMatchType], - AllOf[MatchOrElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.MatchOrElement]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, 
- OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BitOrMatchType = Union["BitOr", MetadataMatchType, MatchIfTrue[cst.BitOr]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchOrElement(BaseMatcherNode): - pattern: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - separator: Union[ - BitOrMatchType, DoNotCareSentinel, OneOf[BitOrMatchType], AllOf[BitOrMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchPattern(BaseMatcherNode): - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchSequence(BaseMatcherNode): - 
metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchSequenceElement(BaseMatcherNode): - value: Union[ - MatchPatternMatchType, - DoNotCareSentinel, - OneOf[MatchPatternMatchType], - AllOf[MatchPatternMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchSingleton(BaseMatcherNode): - value: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchStar(BaseMatcherNode): - name: Union[ - Optional["Name"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Name]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - AllOf[ - Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_name: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatchTuple(BaseMatcherNode): - patterns: Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - DoNotCareSentinel, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - OneOf[ - Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - AllOf[ - 
Union[ - Sequence[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - AtLeastN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - AtMostN[ - Union[ - MatchSequenceElementOrMatchStarMatchType, - OneOf[MatchSequenceElementOrMatchStarMatchType], - AllOf[MatchSequenceElementOrMatchStarMatchType], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.MatchSequenceElement, - cst.MatchStar, - OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], - ] - ] - ], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class 
MatchValue(BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatrixMultiply(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MatrixMultiplyAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Minus(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SimpleStatementLineOrBaseCompoundStatementMatchType = Union[ - "SimpleStatementLine", - "BaseCompoundStatement", - MetadataMatchType, - MatchIfTrue[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Module(BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - DoNotCareSentinel, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AtLeastN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - DoNotCareSentinel, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - ] - ], - AtMostN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - DoNotCareSentinel, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], - AllOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], - ] - ] - ], - OneOf[ - Union[ - Sequence[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - 
AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AtLeastN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - AtMostN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - ] - ] - ], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], - AtLeastN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - AtMostN[ - Union[ - SimpleStatementLineOrBaseCompoundStatementMatchType, - OneOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - AllOf[ - SimpleStatementLineOrBaseCompoundStatementMatchType - ], - ] - ], - ] - ], - MatchIfTrue[ - Sequence[ - Union[ - cst.SimpleStatementLine, - cst.BaseCompoundStatement, - OneOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - AllOf[ - Union[ - cst.SimpleStatementLine, cst.BaseCompoundStatement - ] - ], - ] - ] - ], - ] - ], - ] = DoNotCare() - header: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - footer: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, 
- OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - encoding: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - default_indent: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - default_newline: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - has_trailing_newline: Union[ - boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Modulo(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ModuloAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Multiply(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class MultiplyAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - 
OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Name( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NameItem(BaseMatcherNode): - name: Union[ 
- NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NamedExpr(BaseExpression, BaseMatcherNode): - target: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_before_walrus: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = 
DoNotCare() - whitespace_after_walrus: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Newline(BaseMatcherNode): - value: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Nonlocal(BaseSmallStatement, BaseMatcherNode): - names: Union[ - Sequence[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - DoNotCareSentinel, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.NameItem]], - OneOf[ - Union[ - Sequence[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.NameItem]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - AtLeastN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - AtMostN[ - Union[ - NameItemMatchType, - OneOf[NameItemMatchType], - AllOf[NameItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.NameItem]], - ] - ], - ] = DoNotCare() - whitespace_after_nonlocal: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Not(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NotEqual(BaseCompOp, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - 
whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class NotIn(BaseCompOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_between: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Or(BaseBooleanOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Param(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - annotation: Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - ] - ], - AllOf[ - Union[ - Optional["Annotation"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Annotation]], - ] - ], - ] = DoNotCare() - equal: Union[ - AssignEqualMatchType, - DoNotCareSentinel, - OneOf[AssignEqualMatchType], - AllOf[AssignEqualMatchType], - ] = DoNotCare() - default: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - star: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - whitespace_after_star: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_param: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - 
AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParamSlash(BaseMatcherNode): - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParamSpec(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - whitespace_after_star: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParamStar(BaseMatcherNode): - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ParamMatchType = Union["Param", MetadataMatchType, MatchIfTrue[cst.Param]] -ParamOrParamStarMatchType = Union[ - "Param", - "ParamStar", - MetadataMatchType, - MatchIfTrue[Union[cst.Param, cst.ParamStar]], -] -ParamSlashMatchType = Union[ - "ParamSlash", MetadataMatchType, MatchIfTrue[cst.ParamSlash] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Parameters(BaseMatcherNode): - params: Union[ - Sequence[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Param]], - OneOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - ] = DoNotCare() - star_arg: Union[ - ParamOrParamStarMatchType, - DoNotCareSentinel, - OneOf[ParamOrParamStarMatchType], - AllOf[ParamOrParamStarMatchType], - ] = DoNotCare() - kwonly_params: Union[ - Sequence[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - 
AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Param]], - OneOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - ] = DoNotCare() - star_kwarg: Union[ - Optional["Param"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Param]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] - ] - ], - AllOf[ - Union[ - Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] - ] - ], - ] = DoNotCare() - posonly_params: Union[ - Sequence[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - DoNotCareSentinel, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.Param]], - OneOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - AtLeastN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - AtMostN[ - Union[ - ParamMatchType, - OneOf[ParamMatchType], - AllOf[ParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.Param]], - ] - ], - ] = DoNotCare() - posonly_ind: Union[ - ParamSlashMatchType, - DoNotCareSentinel, - OneOf[ParamSlashMatchType], - AllOf[ParamSlashMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): - first_line: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - empty_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - 
EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - indent: Union[ - boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] - ] = DoNotCare() - last_line: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Pass(BaseSmallStatement, BaseMatcherNode): - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Plus(BaseUnaryOp, BaseMatcherNode): - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Power(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class PowerAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Raise(BaseSmallStatement, BaseMatcherNode): - exc: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - 
OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - cause: Union[ - Optional["From"], - MetadataMatchType, - MatchIfTrue[Optional[cst.From]], - DoNotCareSentinel, - OneOf[ - Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] - ], - AllOf[ - Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] - ], - ] = DoNotCare() - whitespace_after_raise: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Return(BaseSmallStatement, BaseMatcherNode): - value: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - whitespace_after_return: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightCurlyBrace(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightParen(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightShift(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, 
unsafe_hash=False) -class RightShiftAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class RightSquareBracket(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Semicolon(BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Set(BaseExpression, BaseSet, BaseMatcherNode): - elements: Union[ - Sequence[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode): - elt: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - for_in: Union[ - CompForMatchType, - DoNotCareSentinel, - OneOf[CompForMatchType], - AllOf[CompForMatchType], - ] = DoNotCare() - lbrace: Union[ - LeftCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[LeftCurlyBraceMatchType], - AllOf[LeftCurlyBraceMatchType], - ] = DoNotCare() - rbrace: Union[ - RightCurlyBraceMatchType, - DoNotCareSentinel, - OneOf[RightCurlyBraceMatchType], - AllOf[RightCurlyBraceMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - 
DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseSmallStatementMatchType = Union[ - "BaseSmallStatement", MetadataMatchType, MatchIfTrue[cst.BaseSmallStatement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleStatementLine(BaseStatement, BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - 
OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - trailing_whitespace: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleStatementSuite(BaseSuite, BaseMatcherNode): - body: Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - DoNotCareSentinel, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - 
AllOf[ - Union[ - Sequence[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - AtLeastN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - AtMostN[ - Union[ - BaseSmallStatementMatchType, - OneOf[BaseSmallStatementMatchType], - AllOf[BaseSmallStatementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseSmallStatement]], - ] - ], - ] = DoNotCare() - leading_whitespace: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - trailing_whitespace: Union[ - TrailingWhitespaceMatchType, - DoNotCareSentinel, - OneOf[TrailingWhitespaceMatchType], - AllOf[TrailingWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleString(BaseExpression, BaseString, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - 
OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SimpleWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Slice(BaseSlice, BaseMatcherNode): - lower: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - upper: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - step: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - first_colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - second_colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class StarredDictElement(BaseDictElement, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - whitespace_before_value: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class StarredElement(BaseElement, BaseExpression, BaseMatcherNode): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = 
DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_before_value: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -SubscriptElementMatchType = Union[ - "SubscriptElement", MetadataMatchType, MatchIfTrue[cst.SubscriptElement] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Subscript( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - slice: Union[ - Sequence[ - Union[ - SubscriptElementMatchType, - DoNotCareSentinel, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - AtLeastN[ - 
Union[ - SubscriptElementMatchType, - DoNotCareSentinel, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - AtMostN[ - Union[ - SubscriptElementMatchType, - DoNotCareSentinel, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.SubscriptElement]], - OneOf[ - Union[ - Sequence[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - AtLeastN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - AtMostN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.SubscriptElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - AtLeastN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - AtMostN[ - Union[ - SubscriptElementMatchType, - OneOf[SubscriptElementMatchType], - AllOf[SubscriptElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.SubscriptElement]], - ] - ], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - 
RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_value: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseSliceMatchType = Union["BaseSlice", MetadataMatchType, MatchIfTrue[cst.BaseSlice]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SubscriptElement(BaseMatcherNode): - slice: Union[ - BaseSliceMatchType, - DoNotCareSentinel, - OneOf[BaseSliceMatchType], - AllOf[BaseSliceMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Subtract(BaseBinaryOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class SubtractAssign(BaseAugOp, BaseMatcherNode): - whitespace_before: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TrailingWhitespace(BaseMatcherNode): - whitespace: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - comment: Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - ] - ], - AllOf[ - Union[ - Optional["Comment"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Comment]], - ] - ], - ] = DoNotCare() - newline: 
Union[ - NewlineMatchType, - DoNotCareSentinel, - OneOf[NewlineMatchType], - AllOf[NewlineMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ExceptHandlerMatchType = Union[ - "ExceptHandler", MetadataMatchType, MatchIfTrue[cst.ExceptHandler] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - handlers: Union[ - Sequence[ - Union[ - ExceptHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - AtLeastN[ - Union[ - ExceptHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ExceptHandler]], - OneOf[ - Union[ - Sequence[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - AtLeastN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptHandler]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - AtLeastN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptHandlerMatchType, - OneOf[ExceptHandlerMatchType], - AllOf[ExceptHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptHandler]], - ] - ], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - finalbody: Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - AllOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - 
MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -ExceptStarHandlerMatchType = Union[ - "ExceptStarHandler", MetadataMatchType, MatchIfTrue[cst.ExceptStarHandler] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TryStar(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - handlers: Union[ - Sequence[ - Union[ - ExceptStarHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - AtLeastN[ - Union[ - ExceptStarHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptStarHandlerMatchType, - DoNotCareSentinel, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.ExceptStarHandler]], - OneOf[ - Union[ - Sequence[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - AtLeastN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptStarHandler]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - AtLeastN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - AtMostN[ - Union[ - ExceptStarHandlerMatchType, - OneOf[ExceptStarHandlerMatchType], - AllOf[ExceptStarHandlerMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.ExceptStarHandler]], - ] - ], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - finalbody: Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - AllOf[ - Union[ - Optional["Finally"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Finally]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - 
DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Tuple( - BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode -): - elements: Union[ - Sequence[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - DoNotCareSentinel, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseElement]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - AtLeastN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - AtMostN[ - Union[ - BaseElementMatchType, - OneOf[BaseElementMatchType], - AllOf[BaseElementMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseElement]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - 
OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeAlias(BaseSmallStatement, BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - type_parameters: Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - AllOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - ] = DoNotCare() - whitespace_after_type: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_name: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_type_parameters: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_equals: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], 
- ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -TypeVarOrTypeVarTupleOrParamSpecMatchType = Union[ - "TypeVar", - "TypeVarTuple", - "ParamSpec", - MetadataMatchType, - MatchIfTrue[Union[cst.TypeVar, cst.TypeVarTuple, cst.ParamSpec]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeParam(BaseMatcherNode): - param: Union[ - TypeVarOrTypeVarTupleOrParamSpecMatchType, - DoNotCareSentinel, - OneOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], - AllOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -TypeParamMatchType = Union["TypeParam", MetadataMatchType, MatchIfTrue[cst.TypeParam]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeParameters(BaseMatcherNode): - params: Union[ - Sequence[ - Union[ - TypeParamMatchType, - DoNotCareSentinel, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - AtLeastN[ - Union[ - TypeParamMatchType, - DoNotCareSentinel, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - AtMostN[ - Union[ - TypeParamMatchType, - DoNotCareSentinel, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.TypeParam]], - OneOf[ - Union[ - Sequence[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - AtLeastN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - AtMostN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.TypeParam]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - AtLeastN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - AtMostN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.TypeParam]], - ] - ], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeVar(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - bound: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = 
DoNotCare() - colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeVarTuple(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - whitespace_after_star: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseUnaryOpMatchType = Union[ - "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class UnaryOperation(BaseExpression, BaseMatcherNode): - operator: Union[ - BaseUnaryOpMatchType, - DoNotCareSentinel, - OneOf[BaseUnaryOpMatchType], - AllOf[BaseUnaryOpMatchType], - ] = DoNotCare() - expression: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - 
AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - test: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - orelse: Union[ - Optional["Else"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Else]], - DoNotCareSentinel, - OneOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - AllOf[ - Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - whitespace_after_while: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -WithItemMatchType = Union["WithItem", MetadataMatchType, MatchIfTrue[cst.WithItem]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): - items: Union[ - Sequence[ - Union[ - WithItemMatchType, - DoNotCareSentinel, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - AtLeastN[ - Union[ - WithItemMatchType, - DoNotCareSentinel, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - AtMostN[ - Union[ - WithItemMatchType, - DoNotCareSentinel, - OneOf[WithItemMatchType], - 
AllOf[WithItemMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.WithItem]], - OneOf[ - Union[ - Sequence[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - AtLeastN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - AtMostN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.WithItem]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - AtLeastN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - AtMostN[ - Union[ - WithItemMatchType, - OneOf[WithItemMatchType], - AllOf[WithItemMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.WithItem]], - ] - ], - ] = DoNotCare() - body: Union[ - BaseSuiteMatchType, - DoNotCareSentinel, - OneOf[BaseSuiteMatchType], - AllOf[BaseSuiteMatchType], - ] = DoNotCare() - asynchronous: Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - AllOf[ - Union[ - Optional["Asynchronous"], - MetadataMatchType, - MatchIfTrue[Optional[cst.Asynchronous]], - ] - ], - ] = DoNotCare() - leading_lines: Union[ - Sequence[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - DoNotCareSentinel, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.EmptyLine]], - OneOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - AtLeastN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - AtMostN[ - Union[ - EmptyLineMatchType, - OneOf[EmptyLineMatchType], - AllOf[EmptyLineMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.EmptyLine]], - ] - ], - ] = DoNotCare() - lpar: Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] = DoNotCare() - rpar: Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] = DoNotCare() - whitespace_after_with: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_before_colon: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class WithItem(BaseMatcherNode): - 
item: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - asname: Union[ - Optional["AsName"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AsName]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - AllOf[ - Union[ - Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] - ] - ], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -BaseExpressionOrFromOrNoneMatchType = Union[ - "BaseExpression", - "From", - None, - MetadataMatchType, - MatchIfTrue[Union[cst.BaseExpression, cst.From, None]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class Yield(BaseExpression, BaseMatcherNode): - value: Union[ - BaseExpressionOrFromOrNoneMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionOrFromOrNoneMatchType], - AllOf[BaseExpressionOrFromOrNoneMatchType], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - 
OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - ] = DoNotCare() - whitespace_after_yield: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -__all__ = [ - "Add", - "AddAssign", - "AllOf", - "And", - "AnnAssign", - "Annotation", - "Arg", - "AsName", - "Assert", - "Assign", - "AssignEqual", - "AssignTarget", - "Asynchronous", - "AtLeastN", - "AtMostN", - "Attribute", - "AugAssign", - "Await", - "BaseAssignTargetExpression", - "BaseAugOp", - "BaseBinaryOp", - "BaseBooleanOp", - "BaseComp", - "BaseCompOp", - "BaseCompoundStatement", - "BaseDelTargetExpression", - "BaseDict", - "BaseDictElement", - "BaseElement", - "BaseExpression", - "BaseFormattedStringContent", - "BaseList", - "BaseMatcherNode", - "BaseMetadataProvider", - "BaseNumber", - "BaseParenthesizableWhitespace", - "BaseSet", - "BaseSimpleComp", - "BaseSlice", - "BaseSmallStatement", - "BaseStatement", - "BaseString", - "BaseSuite", - "BaseUnaryOp", - "BinaryOperation", - "BitAnd", - "BitAndAssign", - "BitInvert", - "BitOr", - "BitOrAssign", - "BitXor", - "BitXorAssign", - "BooleanOperation", - "Break", - "Call", - "ClassDef", - "Colon", - "Comma", - "Comment", - "CompFor", - "CompIf", - "Comparison", - "ComparisonTarget", - "ConcatenatedString", - "Continue", - "Decorator", - "Del", - "Dict", - "DictComp", - "DictElement", - "Divide", - "DivideAssign", - "DoNotCare", - "DoNotCareSentinel", - "DoesNotMatch", - "Dot", - "Element", - "Ellipsis", - "Else", - "EmptyLine", - "Equal", - "ExceptHandler", - "ExceptStarHandler", - "Expr", - "Finally", - "Float", - "FloorDivide", - "FloorDivideAssign", - "For", - "FormattedString", - "FormattedStringExpression", - "FormattedStringText", - "From", - "FunctionDef", - "GeneratorExp", - "Global", - "GreaterThan", - "GreaterThanEqual", - "If", - "IfExp", - "Imaginary", - "Import", - "ImportAlias", - "ImportFrom", - "ImportStar", - "In", - "IndentedBlock", - "Index", - "Integer", - "Is", - "IsNot", - "Lambda", - "LeftCurlyBrace", - "LeftParen", - "LeftShift", - "LeftShiftAssign", - "LeftSquareBracket", - "LessThan", - "LessThanEqual", - "List", - "ListComp", - "Match", - "MatchAs", - "MatchCase", - "MatchClass", - "MatchDecoratorMismatch", - "MatchIfTrue", - "MatchKeywordElement", - "MatchList", - "MatchMapping", - "MatchMappingElement", - "MatchMetadata", - "MatchMetadataIfTrue", - "MatchOr", - "MatchOrElement", - "MatchPattern", - "MatchRegex", - "MatchSequence", - "MatchSequenceElement", - "MatchSingleton", - "MatchStar", - "MatchTuple", - "MatchValue", - "MatcherDecoratableTransformer", - "MatcherDecoratableVisitor", - "MatrixMultiply", - "MatrixMultiplyAssign", - "Minus", - "Module", - "Modulo", - "ModuloAssign", - "Multiply", - "MultiplyAssign", - "Name", - "NameItem", - "NamedExpr", - "Newline", - "Nonlocal", - "Not", - "NotEqual", - "NotIn", - "OneOf", - "Or", - "Param", - "ParamSlash", - "ParamSpec", - "ParamStar", - "Parameters", - "ParenthesizedWhitespace", - "Pass", - "Plus", - "Power", - "PowerAssign", - "Raise", - "Return", - "RightCurlyBrace", - "RightParen", - "RightShift", - "RightShiftAssign", - "RightSquareBracket", - 
"SaveMatchedNode", - "Semicolon", - "Set", - "SetComp", - "SimpleStatementLine", - "SimpleStatementSuite", - "SimpleString", - "SimpleWhitespace", - "Slice", - "StarredDictElement", - "StarredElement", - "Subscript", - "SubscriptElement", - "Subtract", - "SubtractAssign", - "TrailingWhitespace", - "Try", - "TryStar", - "Tuple", - "TypeAlias", - "TypeOf", - "TypeParam", - "TypeParameters", - "TypeVar", - "TypeVarTuple", - "UnaryOperation", - "While", - "With", - "WithItem", - "Yield", - "ZeroOrMore", - "ZeroOrOne", - "call_if_inside", - "call_if_not_inside", - "extract", - "extractall", - "findall", - "leave", - "matches", - "replace", - "visit", -] +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + + +# This file was generated by libcst.codegen.gen_matcher_classes +from dataclasses import dataclass +from typing import Literal, Optional, Sequence, Union + +import libcst as cst +from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit + +from libcst.matchers._matcher_base import ( + AbstractBaseMatcherNodeMeta, + AllOf, + AtLeastN, + AtMostN, + BaseMatcherNode, + DoesNotMatch, + DoNotCare, + DoNotCareSentinel, + extract, + extractall, + findall, + matches, + MatchIfTrue, + MatchMetadata, + MatchMetadataIfTrue, + MatchRegex, + OneOf, + replace, + SaveMatchedNode, + TypeOf, + ZeroOrMore, + ZeroOrOne, +) +from libcst.matchers._visitors import ( + MatchDecoratorMismatch, + MatcherDecoratableTransformer, + MatcherDecoratableVisitor, +) + + +class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta): + __slots__ = () + + +class BaseAssignTargetExpression(_NodeABC): + pass + + +class BaseAugOp(_NodeABC): + pass + + +class BaseBinaryOp(_NodeABC): + pass + + +class BaseBooleanOp(_NodeABC): + pass + + +class BaseComp(_NodeABC): + pass + + +class BaseCompOp(_NodeABC): + pass + + +class BaseCompoundStatement(_NodeABC): + pass + + +class BaseDelTargetExpression(_NodeABC): + pass + + +class BaseDict(_NodeABC): + pass + + +class BaseDictElement(_NodeABC): + pass + + +class BaseElement(_NodeABC): + pass + + +class BaseExpression(_NodeABC): + pass + + +class BaseFormattedStringContent(_NodeABC): + pass + + +class BaseList(_NodeABC): + pass + + +class BaseMetadataProvider(_NodeABC): + pass + + +class BaseNumber(_NodeABC): + pass + + +class BaseParenthesizableWhitespace(_NodeABC): + pass + + +class BaseSet(_NodeABC): + pass + + +class BaseSimpleComp(_NodeABC): + pass + + +class BaseSlice(_NodeABC): + pass + + +class BaseSmallStatement(_NodeABC): + pass + + +class BaseStatement(_NodeABC): + pass + + +class BaseString(_NodeABC): + pass + + +class BaseSuite(_NodeABC): + pass + + +class BaseUnaryOp(_NodeABC): + pass + + +MetadataMatchType = Union[MatchMetadata, MatchMetadataIfTrue] + + +BaseParenthesizableWhitespaceMatchType = Union[ + "BaseParenthesizableWhitespace", + MetadataMatchType, + MatchIfTrue[cst.BaseParenthesizableWhitespace], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Add(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: 
Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AddAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class And(BaseBooleanOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseAssignTargetExpressionMatchType = Union[ + "BaseAssignTargetExpression", + MetadataMatchType, + MatchIfTrue[cst.BaseAssignTargetExpression], +] +AnnotationMatchType = Union[ + "Annotation", MetadataMatchType, MatchIfTrue[cst.Annotation] +] +AssignEqualMatchType = Union[ + "AssignEqual", MetadataMatchType, MatchIfTrue[cst.AssignEqual] +] +SemicolonMatchType = Union["Semicolon", MetadataMatchType, MatchIfTrue[cst.Semicolon]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AnnAssign(BaseSmallStatement, BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + annotation: Union[ + AnnotationMatchType, + DoNotCareSentinel, + OneOf[AnnotationMatchType], + AllOf[AnnotationMatchType], + ] = DoNotCare() + value: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseExpressionMatchType = Union[ + "BaseExpression", MetadataMatchType, MatchIfTrue[cst.BaseExpression] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Annotation(BaseMatcherNode): + annotation: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + whitespace_before_indicator: Union[ + 
BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_indicator: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +CommaMatchType = Union["Comma", MetadataMatchType, MatchIfTrue[cst.Comma]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Arg(BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + keyword: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + star: Union[ + Literal["", "*", "**"], + MetadataMatchType, + MatchIfTrue[Literal["", "*", "**"]], + DoNotCareSentinel, + OneOf[ + Union[ + Literal["", "*", "**"], + MetadataMatchType, + MatchIfTrue[Literal["", "*", "**"]], + ] + ], + AllOf[ + Union[ + Literal["", "*", "**"], + MetadataMatchType, + MatchIfTrue[Literal["", "*", "**"]], + ] + ], + ] = DoNotCare() + whitespace_after_star: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_arg: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +NameOrTupleOrListMatchType = Union[ + "Name", + "Tuple", + "List", + MetadataMatchType, + MatchIfTrue[Union[cst.Name, cst.Tuple, cst.List]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AsName(BaseMatcherNode): + name: Union[ + NameOrTupleOrListMatchType, + DoNotCareSentinel, + OneOf[NameOrTupleOrListMatchType], + AllOf[NameOrTupleOrListMatchType], + ] = DoNotCare() + whitespace_before_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SimpleWhitespaceMatchType = Union[ + "SimpleWhitespace", MetadataMatchType, MatchIfTrue[cst.SimpleWhitespace] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Assert(BaseSmallStatement, BaseMatcherNode): + test: Union[ + 
BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + msg: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_after_assert: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +AssignTargetMatchType = Union[ + "AssignTarget", MetadataMatchType, MatchIfTrue[cst.AssignTarget] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Assign(BaseSmallStatement, BaseMatcherNode): + targets: Union[ + Sequence[ + Union[ + AssignTargetMatchType, + DoNotCareSentinel, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + AtLeastN[ + Union[ + AssignTargetMatchType, + DoNotCareSentinel, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + AtMostN[ + Union[ + AssignTargetMatchType, + DoNotCareSentinel, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.AssignTarget]], + OneOf[ + Union[ + Sequence[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + AtLeastN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + AtMostN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.AssignTarget]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + AtLeastN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + AtMostN[ + Union[ + AssignTargetMatchType, + OneOf[AssignTargetMatchType], + AllOf[AssignTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.AssignTarget]], + ] + ], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AssignEqual(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + 
AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AssignTarget(BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + whitespace_before_equal: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equal: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Asynchronous(BaseMatcherNode): + whitespace_after: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +NameMatchType = Union["Name", MetadataMatchType, MatchIfTrue[cst.Name]] +DotMatchType = Union["Dot", MetadataMatchType, MatchIfTrue[cst.Dot]] +LeftParenMatchType = Union["LeftParen", MetadataMatchType, MatchIfTrue[cst.LeftParen]] +RightParenMatchType = Union[ + "RightParen", MetadataMatchType, MatchIfTrue[cst.RightParen] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Attribute( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + attr: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + dot: Union[ + DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + 
RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseAugOpMatchType = Union["BaseAugOp", MetadataMatchType, MatchIfTrue[cst.BaseAugOp]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class AugAssign(BaseSmallStatement, BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + operator: Union[ + BaseAugOpMatchType, + DoNotCareSentinel, + OneOf[BaseAugOpMatchType], + AllOf[BaseAugOpMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Await(BaseExpression, BaseMatcherNode): + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_await: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseBinaryOpMatchType = Union[ + "BaseBinaryOp", MetadataMatchType, MatchIfTrue[cst.BaseBinaryOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BinaryOperation(BaseExpression, BaseMatcherNode): + left: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + operator: Union[ + BaseBinaryOpMatchType, + DoNotCareSentinel, + OneOf[BaseBinaryOpMatchType], + AllOf[BaseBinaryOpMatchType], + ] = DoNotCare() + right: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ 
+ Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitAnd(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitAndAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitInvert(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitOr(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitOrAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitXor(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BitXorAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseBooleanOpMatchType = Union[ + "BaseBooleanOp", MetadataMatchType, MatchIfTrue[cst.BaseBooleanOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class BooleanOperation(BaseExpression, BaseMatcherNode): + left: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + operator: Union[ + BaseBooleanOpMatchType, + DoNotCareSentinel, + OneOf[BaseBooleanOpMatchType], + AllOf[BaseBooleanOpMatchType], + ] = DoNotCare() + right: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Break(BaseSmallStatement, BaseMatcherNode): + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ArgMatchType = Union["Arg", MetadataMatchType, MatchIfTrue[cst.Arg]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Call(BaseExpression, BaseMatcherNode): + func: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + args: Union[ + Sequence[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + AtMostN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + 
AllOf[ArgMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Arg]], + OneOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_func: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = 
DoNotCare() + whitespace_before_args: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseSuiteMatchType = Union["BaseSuite", MetadataMatchType, MatchIfTrue[cst.BaseSuite]] +DecoratorMatchType = Union["Decorator", MetadataMatchType, MatchIfTrue[cst.Decorator]] +EmptyLineMatchType = Union["EmptyLine", MetadataMatchType, MatchIfTrue[cst.EmptyLine]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + bases: Union[ + Sequence[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + AtMostN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Arg]], + OneOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + ] = DoNotCare() + keywords: Union[ + Sequence[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + AtMostN[ + Union[ + ArgMatchType, + DoNotCareSentinel, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Arg]], + OneOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ArgMatchType, + OneOf[ArgMatchType], + AllOf[ArgMatchType], + AtLeastN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + AtMostN[ + Union[ + ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Arg]], + ] + ], + ] = DoNotCare() + decorators: Union[ + Sequence[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + 
AllOf[DecoratorMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Decorator]], + OneOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + ] = DoNotCare() + lpar: Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] = DoNotCare() + rpar: Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + lines_after_decorators: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + 
AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_class: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_name: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + type_parameters: Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + AllOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + ] = DoNotCare() + whitespace_after_type_parameters: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Colon(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Comma(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +strMatchType = Union[str, MetadataMatchType, MatchIfTrue[str]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Comment(BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +CompIfMatchType = Union["CompIf", MetadataMatchType, MatchIfTrue[cst.CompIf]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class CompFor(BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + iter: Union[ + BaseExpressionMatchType, + 
DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + ifs: Union[ + Sequence[ + Union[ + CompIfMatchType, + DoNotCareSentinel, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + AtLeastN[ + Union[ + CompIfMatchType, + DoNotCareSentinel, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + AtMostN[ + Union[ + CompIfMatchType, + DoNotCareSentinel, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.CompIf]], + OneOf[ + Union[ + Sequence[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + AtLeastN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + AtMostN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.CompIf]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + AtLeastN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + AtMostN[ + Union[ + CompIfMatchType, + OneOf[CompIfMatchType], + AllOf[CompIfMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.CompIf]], + ] + ], + ] = DoNotCare() + inner_for_in: Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Optional[cst.CompFor]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Optional[cst.CompFor]], + ] + ], + AllOf[ + Union[ + Optional["CompFor"], + MetadataMatchType, + MatchIfTrue[Optional[cst.CompFor]], + ] + ], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_for: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_in: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_in: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class CompIf(BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_test: Union[ + BaseParenthesizableWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ComparisonTargetMatchType = Union[ + "ComparisonTarget", MetadataMatchType, MatchIfTrue[cst.ComparisonTarget] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Comparison(BaseExpression, BaseMatcherNode): + left: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comparisons: Union[ + Sequence[ + Union[ + ComparisonTargetMatchType, + DoNotCareSentinel, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + AtLeastN[ + Union[ + ComparisonTargetMatchType, + DoNotCareSentinel, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + AtMostN[ + Union[ + ComparisonTargetMatchType, + DoNotCareSentinel, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ComparisonTarget]], + OneOf[ + Union[ + Sequence[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + AtLeastN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + AtMostN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ComparisonTarget]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + AtLeastN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + AtMostN[ + Union[ + ComparisonTargetMatchType, + OneOf[ComparisonTargetMatchType], + AllOf[ComparisonTargetMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ComparisonTarget]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + 
AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseCompOpMatchType = Union[ + "BaseCompOp", MetadataMatchType, MatchIfTrue[cst.BaseCompOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ComparisonTarget(BaseMatcherNode): + operator: Union[ + BaseCompOpMatchType, + DoNotCareSentinel, + OneOf[BaseCompOpMatchType], + AllOf[BaseCompOpMatchType], + ] = DoNotCare() + comparator: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SimpleStringOrFormattedStringMatchType = Union[ + "SimpleString", + "FormattedString", + MetadataMatchType, + MatchIfTrue[Union[cst.SimpleString, cst.FormattedString]], +] +SimpleStringOrFormattedStringOrConcatenatedStringMatchType = Union[ + "SimpleString", + "FormattedString", + "ConcatenatedString", + MetadataMatchType, + MatchIfTrue[Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode): + left: Union[ + SimpleStringOrFormattedStringMatchType, + DoNotCareSentinel, + OneOf[SimpleStringOrFormattedStringMatchType], + AllOf[SimpleStringOrFormattedStringMatchType], + ] = DoNotCare() + right: Union[ + SimpleStringOrFormattedStringOrConcatenatedStringMatchType, + DoNotCareSentinel, + OneOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], + AllOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + 
AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_between: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Continue(BaseSmallStatement, BaseMatcherNode): + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +TrailingWhitespaceMatchType = Union[ + "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Decorator(BaseMatcherNode): + decorator: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + 
MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_at: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_whitespace: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseDelTargetExpressionMatchType = Union[ + "BaseDelTargetExpression", + MetadataMatchType, + MatchIfTrue[cst.BaseDelTargetExpression], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Del(BaseSmallStatement, BaseMatcherNode): + target: Union[ + BaseDelTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseDelTargetExpressionMatchType], + AllOf[BaseDelTargetExpressionMatchType], + ] = DoNotCare() + whitespace_after_del: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseDictElementMatchType = Union[ + "BaseDictElement", MetadataMatchType, MatchIfTrue[cst.BaseDictElement] +] +LeftCurlyBraceMatchType = Union[ + "LeftCurlyBrace", MetadataMatchType, MatchIfTrue[cst.LeftCurlyBrace] +] +RightCurlyBraceMatchType = Union[ + "RightCurlyBrace", MetadataMatchType, MatchIfTrue[cst.RightCurlyBrace] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Dict(BaseDict, BaseExpression, BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + BaseDictElementMatchType, + DoNotCareSentinel, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + AtLeastN[ + Union[ + BaseDictElementMatchType, + DoNotCareSentinel, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseDictElementMatchType, + DoNotCareSentinel, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseDictElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + AtLeastN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + 
AllOf[BaseDictElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseDictElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + AtLeastN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseDictElementMatchType, + OneOf[BaseDictElementMatchType], + AllOf[BaseDictElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseDictElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + 
OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +CompForMatchType = Union["CompFor", MetadataMatchType, MatchIfTrue[cst.CompFor]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): + key: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + 
MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class DictElement(BaseDictElement, BaseMatcherNode): + key: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Divide(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class DivideAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Dot(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + 
MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Element(BaseElement, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Ellipsis(BaseExpression, BaseMatcherNode): + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Else(BaseMatcherNode): + body: Union[ + 
BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[bool]] +NewlineMatchType = Union["Newline", MetadataMatchType, MatchIfTrue[cst.Newline]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class EmptyLine(BaseMatcherNode): + indent: Union[ + boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] + ] = DoNotCare() + whitespace: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + comment: Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + AllOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + ] = DoNotCare() + newline: Union[ + NewlineMatchType, + DoNotCareSentinel, + OneOf[NewlineMatchType], + AllOf[NewlineMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Equal(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, 
unsafe_hash=False) +class ExceptHandler(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + type: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + name: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_except: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ExceptStarHandler(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + type: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + name: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + 
AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_except: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Expr(BaseSmallStatement, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Finally(BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + 
AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Float(BaseExpression, BaseNumber, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FloorDivide(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FloorDivideAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + target: Union[ + BaseAssignTargetExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], + ] = DoNotCare() + iter: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ 
+ Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_for: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_in: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_in: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseFormattedStringContentMatchType = Union[ + "BaseFormattedStringContent", + MetadataMatchType, + MatchIfTrue[cst.BaseFormattedStringContent], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FormattedString(BaseExpression, BaseString, BaseMatcherNode): + parts: Union[ + Sequence[ + Union[ + BaseFormattedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + AtLeastN[ + Union[ + BaseFormattedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseFormattedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + AtLeastN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + AtLeastN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseFormattedStringContentMatchType, + OneOf[BaseFormattedStringContentMatchType], + AllOf[BaseFormattedStringContentMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseFormattedStringContent]], + ] + ], + ] = DoNotCare() + start: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + end: Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + DoNotCareSentinel, + OneOf[ + Union[ + 
Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + ] + ], + AllOf[ + Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode): + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + conversion: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = 
DoNotCare() + format_spec: Union[ + Optional[Sequence["BaseFormattedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional[Sequence["BaseFormattedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], + ] + ], + AllOf[ + Union[ + Optional[Sequence["BaseFormattedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]], + ] + ], + ] = DoNotCare() + whitespace_before_expression: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_expression: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + equal: Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + ] + ], + AllOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FormattedStringText(BaseFormattedStringContent, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class From(BaseMatcherNode): + item: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + whitespace_before_from: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_from: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ParametersMatchType = Union[ + "Parameters", MetadataMatchType, MatchIfTrue[cst.Parameters] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + params: Union[ + ParametersMatchType, + DoNotCareSentinel, + OneOf[ParametersMatchType], + AllOf[ParametersMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + decorators: Union[ + Sequence[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] 
+ ], + AtMostN[ + Union[ + DecoratorMatchType, + DoNotCareSentinel, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Decorator]], + OneOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + AtLeastN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + AtMostN[ + Union[ + DecoratorMatchType, + OneOf[DecoratorMatchType], + AllOf[DecoratorMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Decorator]], + ] + ], + ] = DoNotCare() + returns: Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + AllOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + lines_after_decorators: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], 
+ OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_def: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_name: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_params: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + type_parameters: Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + AllOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + ] = DoNotCare() + whitespace_after_type_parameters: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): + elt: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ 
+ Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +NameItemMatchType = Union["NameItem", MetadataMatchType, MatchIfTrue[cst.NameItem]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Global(BaseSmallStatement, BaseMatcherNode): + names: Union[ + Sequence[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.NameItem]], + OneOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + ] = DoNotCare() + whitespace_after_global: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + 
OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class GreaterThan(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class GreaterThanEqual(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +IfOrElseOrNoneMatchType = Union[ + "If", "Else", None, MetadataMatchType, MatchIfTrue[Union[cst.If, cst.Else, None]] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + orelse: Union[ + IfOrElseOrNoneMatchType, + DoNotCareSentinel, + OneOf[IfOrElseOrNoneMatchType], + AllOf[IfOrElseOrNoneMatchType], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_test: Union[ + 
SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_test: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class IfExp(BaseExpression, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + orelse: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + 
MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_if: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_if: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_else: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_else: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + 
AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ImportAliasMatchType = Union[ + "ImportAlias", MetadataMatchType, MatchIfTrue[cst.ImportAlias] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Import(BaseSmallStatement, BaseMatcherNode): + names: Union[ + Sequence[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + whitespace_after_import: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +AttributeOrNameMatchType = Union[ + "Attribute", "Name", MetadataMatchType, MatchIfTrue[Union[cst.Attribute, cst.Name]] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ImportAlias(BaseMatcherNode): + name: Union[ + AttributeOrNameMatchType, + DoNotCareSentinel, + OneOf[AttributeOrNameMatchType], + AllOf[AttributeOrNameMatchType], + ] = DoNotCare() + asname: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +AttributeOrNameOrNoneMatchType = Union[ + "Attribute", + "Name", + None, + MetadataMatchType, + 
MatchIfTrue[Union[cst.Attribute, cst.Name, None]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ImportFrom(BaseSmallStatement, BaseMatcherNode): + module: Union[ + AttributeOrNameOrNoneMatchType, + DoNotCareSentinel, + OneOf[AttributeOrNameOrNoneMatchType], + AllOf[AttributeOrNameOrNoneMatchType], + ] = DoNotCare() + names: Union[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + DoNotCareSentinel, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ], + "ImportStar", + MetadataMatchType, + MatchIfTrue[ + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + ] + ], + DoNotCareSentinel, + OneOf[ + Union[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ], + "ImportStar", + MetadataMatchType, + MatchIfTrue[ + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + ] + ], + ] + ], + AllOf[ + Union[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + 
AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + OneOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + AtLeastN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + AtMostN[ + Union[ + ImportAliasMatchType, + OneOf[ImportAliasMatchType], + AllOf[ImportAliasMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ImportAlias]], + ] + ], + ], + "ImportStar", + MetadataMatchType, + MatchIfTrue[ + Union[ + Sequence[cst.ImportAlias], + cst.ImportStar, + OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], + ] + ], + ] + ], + ] = DoNotCare() + relative: Union[ + Sequence[ + Union[ + DotMatchType, + DoNotCareSentinel, + OneOf[DotMatchType], + AllOf[DotMatchType], + AtLeastN[ + Union[ + DotMatchType, + DoNotCareSentinel, + OneOf[DotMatchType], + AllOf[DotMatchType], + ] + ], + AtMostN[ + Union[ + DotMatchType, + DoNotCareSentinel, + OneOf[DotMatchType], + AllOf[DotMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Dot]], + OneOf[ + Union[ + Sequence[ + Union[ + DotMatchType, + OneOf[DotMatchType], + AllOf[DotMatchType], + AtLeastN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + AtMostN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Dot]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + DotMatchType, + OneOf[DotMatchType], + AllOf[DotMatchType], + AtLeastN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + AtMostN[ + Union[ + DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType] + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Dot]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftParen]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Optional["LeftParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightParen]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Optional["RightParen"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightParen]], + ] + ], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + whitespace_after_from: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + 
whitespace_before_import: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_import: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ImportStar(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class In(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseStatementMatchType = Union[ + "BaseStatement", MetadataMatchType, MatchIfTrue[cst.BaseStatement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class IndentedBlock(BaseSuite, BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + BaseStatementMatchType, + DoNotCareSentinel, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + AtLeastN[ + Union[ + BaseStatementMatchType, + DoNotCareSentinel, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseStatementMatchType, + DoNotCareSentinel, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseStatement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + AtLeastN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseStatement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + AtLeastN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseStatementMatchType, + OneOf[BaseStatementMatchType], + AllOf[BaseStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseStatement]], + ] + ], + ] = DoNotCare() + header: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + indent: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], 
+ AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Index(BaseSlice, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + star: Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + ] + ], + AllOf[ + Union[ + Optional[Literal["*"]], + MetadataMatchType, + MatchIfTrue[Optional[Literal["*"]]], + ] + ], + ] = DoNotCare() + whitespace_after_star: Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + ] + ], + AllOf[ + Union[ + Optional["BaseParenthesizableWhitespace"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Integer(BaseExpression, BaseNumber, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + 
AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Is(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class IsNot(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_between: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ColonMatchType = Union["Colon", MetadataMatchType, MatchIfTrue[cst.Colon]] + + 
+@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Lambda(BaseExpression, BaseMatcherNode): + params: Union[ + ParametersMatchType, + DoNotCareSentinel, + OneOf[ParametersMatchType], + AllOf[ParametersMatchType], + ] = DoNotCare() + body: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_lambda: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftCurlyBrace(BaseMatcherNode): + 
whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftParen(BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftShift(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftShiftAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LeftSquareBracket(BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LessThan(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class LessThanEqual(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + 
BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseElementMatchType = Union[ + "BaseElement", MetadataMatchType, MatchIfTrue[cst.BaseElement] +] +LeftSquareBracketMatchType = Union[ + "LeftSquareBracket", MetadataMatchType, MatchIfTrue[cst.LeftSquareBracket] +] +RightSquareBracketMatchType = Union[ + "RightSquareBracket", MetadataMatchType, MatchIfTrue[cst.RightSquareBracket] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class List( + BaseAssignTargetExpression, + BaseDelTargetExpression, + BaseExpression, + BaseList, + BaseMatcherNode, +): + elements: Union[ + Sequence[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNode): + elt: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ 
+ LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchCaseMatchType = Union["MatchCase", MetadataMatchType, MatchIfTrue[cst.MatchCase]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Match(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + subject: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + cases: Union[ + Sequence[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + DoNotCareSentinel, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchCase]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchCase]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + AtLeastN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + AtMostN[ + Union[ + MatchCaseMatchType, + OneOf[MatchCaseMatchType], + AllOf[MatchCaseMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchCase]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ 
+ EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_match: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + indent: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchAs(BaseMatcherNode): + pattern: Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + ] + ], + 
AllOf[ + Union[ + Optional["MatchPattern"], + MetadataMatchType, + MatchIfTrue[Optional[cst.MatchPattern]], + ] + ], + ] = DoNotCare() + name: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + whitespace_before_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_as: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchPatternMatchType = Union[ + "MatchPattern", MetadataMatchType, MatchIfTrue[cst.MatchPattern] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchCase(BaseMatcherNode): + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + guard: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_case: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_if: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_if: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchSequenceElementMatchType = Union[ + "MatchSequenceElement", MetadataMatchType, MatchIfTrue[cst.MatchSequenceElement] +] +MatchKeywordElementMatchType = Union[ + "MatchKeywordElement", MetadataMatchType, MatchIfTrue[cst.MatchKeywordElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchClass(BaseMatcherNode): + cls: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + patterns: Union[ + Sequence[ 
+ Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + AtLeastN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementMatchType, + OneOf[MatchSequenceElementMatchType], + AllOf[MatchSequenceElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchSequenceElement]], + ] + ], + ] = DoNotCare() + kwds: Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + DoNotCareSentinel, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + AtLeastN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchKeywordElementMatchType, + OneOf[MatchKeywordElementMatchType], + AllOf[MatchKeywordElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchKeywordElement]], + ] + ], + ] = DoNotCare() + whitespace_after_cls: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_patterns: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + 
AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_kwds: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchKeywordElement(BaseMatcherNode): + key: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_equal: Union[ + BaseParenthesizableWhitespaceMatchType, + 
DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equal: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchSequenceElementOrMatchStarMatchType = Union[ + "MatchSequenceElement", + "MatchStar", + MetadataMatchType, + MatchIfTrue[Union[cst.MatchSequenceElement, cst.MatchStar]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchList(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + ] = DoNotCare() + lbracket: Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + ] + ], + AllOf[ + Union[ + Optional["LeftSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.LeftSquareBracket]], + ] + ], + ] = 
DoNotCare() + rbracket: Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + ] + ], + AllOf[ + Union[ + Optional["RightSquareBracket"], + MetadataMatchType, + MatchIfTrue[Optional[cst.RightSquareBracket]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchMappingElementMatchType = Union[ + "MatchMappingElement", MetadataMatchType, MatchIfTrue[cst.MatchMappingElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchMapping(BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + 
AtLeastN[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + DoNotCareSentinel, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchMappingElement]], + OneOf[ + Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchMappingElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + AtLeastN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchMappingElementMatchType, + OneOf[MatchMappingElementMatchType], + AllOf[MatchMappingElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchMappingElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + rest: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + whitespace_before_rest: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_comma: Union[ + Optional["Comma"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comma]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] + ] + ], + AllOf[ + Union[ + Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]] + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + 
LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchMappingElement(BaseMatcherNode): + key: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_colon: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +MatchOrElementMatchType = Union[ + "MatchOrElement", MetadataMatchType, MatchIfTrue[cst.MatchOrElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchOr(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + DoNotCareSentinel, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.MatchOrElement]], + OneOf[ + Union[ + Sequence[ + Union[ + 
MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchOrElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + AtLeastN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + AtMostN[ + Union[ + MatchOrElementMatchType, + OneOf[MatchOrElementMatchType], + AllOf[MatchOrElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.MatchOrElement]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +BitOrMatchType = Union["BitOr", MetadataMatchType, MatchIfTrue[cst.BitOr]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchOrElement(BaseMatcherNode): + pattern: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + separator: Union[ + BitOrMatchType, DoNotCareSentinel, OneOf[BitOrMatchType], AllOf[BitOrMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchPattern(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSequence(BaseMatcherNode): + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSequenceElement(BaseMatcherNode): + value: Union[ + MatchPatternMatchType, + DoNotCareSentinel, + OneOf[MatchPatternMatchType], + AllOf[MatchPatternMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchSingleton(BaseMatcherNode): + value: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchStar(BaseMatcherNode): + name: Union[ + Optional["Name"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Name]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + AllOf[ + Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_name: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchTuple(BaseMatcherNode): + patterns: Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + DoNotCareSentinel, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + 
Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + AtLeastN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + AtMostN[ + Union[ + MatchSequenceElementOrMatchStarMatchType, + OneOf[MatchSequenceElementOrMatchStarMatchType], + AllOf[MatchSequenceElementOrMatchStarMatchType], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.MatchSequenceElement, + cst.MatchStar, + OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]], + ] + ] + ], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + 
OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatchValue(BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatrixMultiply(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MatrixMultiplyAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Minus(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SimpleStatementLineOrBaseCompoundStatementMatchType = Union[ + "SimpleStatementLine", + "BaseCompoundStatement", + MetadataMatchType, + MatchIfTrue[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Module(BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + DoNotCareSentinel, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + 
AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AtLeastN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + DoNotCareSentinel, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + ] + ], + AtMostN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + DoNotCareSentinel, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], + AllOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], + ] + ] + ], + OneOf[ + Union[ + Sequence[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AtLeastN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + AtMostN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + AllOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + ] + ] + ], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType], + AtLeastN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + AtMostN[ + Union[ + SimpleStatementLineOrBaseCompoundStatementMatchType, + OneOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + AllOf[ + SimpleStatementLineOrBaseCompoundStatementMatchType + ], + ] + ], + ] + ], + MatchIfTrue[ + Sequence[ + Union[ + cst.SimpleStatementLine, + cst.BaseCompoundStatement, + OneOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + AllOf[ + Union[ + cst.SimpleStatementLine, cst.BaseCompoundStatement + ] + ], + ] + ] + ], + ] + ], + ] = DoNotCare() + header: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + 
], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + footer: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + encoding: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + default_indent: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + default_newline: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + has_trailing_newline: Union[ + boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Modulo(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ModuloAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + 
OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Multiply(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class MultiplyAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Name( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ 
+ RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NameItem(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NamedExpr(BaseExpression, BaseMatcherNode): + target: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + 
AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_before_walrus: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_walrus: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Newline(BaseMatcherNode): + value: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Nonlocal(BaseSmallStatement, BaseMatcherNode): + names: Union[ + Sequence[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + DoNotCareSentinel, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.NameItem]], + OneOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + AtLeastN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + AtMostN[ + Union[ + NameItemMatchType, + OneOf[NameItemMatchType], + AllOf[NameItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.NameItem]], + ] + ], + ] = DoNotCare() + whitespace_after_nonlocal: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + 
OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Not(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NotEqual(BaseCompOp, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class NotIn(BaseCompOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_between: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Or(BaseBooleanOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Param(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + annotation: Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + AllOf[ + Union[ + Optional["Annotation"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Annotation]], + ] + ], + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + default: Union[ + Optional["BaseExpression"], + 
MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + star: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + whitespace_after_star: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_param: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParamSlash(BaseMatcherNode): + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParamSpec(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParamStar(BaseMatcherNode): + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ParamMatchType = Union["Param", MetadataMatchType, MatchIfTrue[cst.Param]] +ParamOrParamStarMatchType = Union[ + "Param", + "ParamStar", + MetadataMatchType, + MatchIfTrue[Union[cst.Param, cst.ParamStar]], +] +ParamSlashMatchType = Union[ + "ParamSlash", MetadataMatchType, MatchIfTrue[cst.ParamSlash] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Parameters(BaseMatcherNode): + params: Union[ + Sequence[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Param]], + OneOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + 
AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + ] = DoNotCare() + star_arg: Union[ + ParamOrParamStarMatchType, + DoNotCareSentinel, + OneOf[ParamOrParamStarMatchType], + AllOf[ParamOrParamStarMatchType], + ] = DoNotCare() + kwonly_params: Union[ + Sequence[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Param]], + OneOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + ] = DoNotCare() + star_kwarg: Union[ + Optional["Param"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Param]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] + ] + ], + AllOf[ + Union[ + Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]] + ] + ], + ] = DoNotCare() + posonly_params: Union[ + Sequence[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + DoNotCareSentinel, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.Param]], + OneOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + AtLeastN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + AtMostN[ + Union[ + ParamMatchType, + OneOf[ParamMatchType], + AllOf[ParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.Param]], + ] + ], + ] = DoNotCare() + posonly_ind: Union[ + ParamSlashMatchType, + DoNotCareSentinel, + OneOf[ParamSlashMatchType], + AllOf[ParamSlashMatchType], + ] = DoNotCare() + metadata: Union[ + 
MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): + first_line: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + empty_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + indent: Union[ + boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType] + ] = DoNotCare() + last_line: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Pass(BaseSmallStatement, BaseMatcherNode): + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Plus(BaseUnaryOp, BaseMatcherNode): + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Power(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class PowerAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Raise(BaseSmallStatement, BaseMatcherNode): + exc: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + cause: Union[ + Optional["From"], + MetadataMatchType, + MatchIfTrue[Optional[cst.From]], + DoNotCareSentinel, + OneOf[ + Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] + ], + AllOf[ + Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]] + ], + ] = DoNotCare() + whitespace_after_raise: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Return(BaseSmallStatement, BaseMatcherNode): + value: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + whitespace_after_return: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightCurlyBrace(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightParen(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + 
OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightShift(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightShiftAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class RightSquareBracket(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Semicolon(BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Set(BaseExpression, BaseSet, BaseMatcherNode): + elements: Union[ + Sequence[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + 
OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + 
AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode): + elt: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + for_in: Union[ + CompForMatchType, + DoNotCareSentinel, + OneOf[CompForMatchType], + AllOf[CompForMatchType], + ] = DoNotCare() + lbrace: Union[ + LeftCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[LeftCurlyBraceMatchType], + AllOf[LeftCurlyBraceMatchType], + ] = DoNotCare() + rbrace: Union[ + RightCurlyBraceMatchType, + DoNotCareSentinel, + OneOf[RightCurlyBraceMatchType], + AllOf[RightCurlyBraceMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseSmallStatementMatchType = Union[ + 
"BaseSmallStatement", MetadataMatchType, MatchIfTrue[cst.BaseSmallStatement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleStatementLine(BaseStatement, BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + trailing_whitespace: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleStatementSuite(BaseSuite, BaseMatcherNode): + body: Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + 
OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + DoNotCareSentinel, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + AtLeastN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + AtMostN[ + Union[ + BaseSmallStatementMatchType, + OneOf[BaseSmallStatementMatchType], + AllOf[BaseSmallStatementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseSmallStatement]], + ] + ], + ] = DoNotCare() + leading_whitespace: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + trailing_whitespace: Union[ + TrailingWhitespaceMatchType, + DoNotCareSentinel, + OneOf[TrailingWhitespaceMatchType], + AllOf[TrailingWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleString(BaseExpression, BaseString, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + 
OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SimpleWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Slice(BaseSlice, BaseMatcherNode): + lower: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + upper: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + step: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + first_colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + second_colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class StarredDictElement(BaseDictElement, BaseMatcherNode): + value: Union[ + 
BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + whitespace_before_value: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class StarredElement(BaseElement, BaseExpression, BaseMatcherNode): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + 
], + ] = DoNotCare() + whitespace_before_value: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +SubscriptElementMatchType = Union[ + "SubscriptElement", MetadataMatchType, MatchIfTrue[cst.SubscriptElement] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Subscript( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + slice: Union[ + Sequence[ + Union[ + SubscriptElementMatchType, + DoNotCareSentinel, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + AtLeastN[ + Union[ + SubscriptElementMatchType, + DoNotCareSentinel, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + AtMostN[ + Union[ + SubscriptElementMatchType, + DoNotCareSentinel, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.SubscriptElement]], + OneOf[ + Union[ + Sequence[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + AtLeastN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + AtMostN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.SubscriptElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + AtLeastN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + AtMostN[ + Union[ + SubscriptElementMatchType, + OneOf[SubscriptElementMatchType], + AllOf[SubscriptElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.SubscriptElement]], + ] + ], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + 
OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_value: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseSliceMatchType = Union["BaseSlice", MetadataMatchType, MatchIfTrue[cst.BaseSlice]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SubscriptElement(BaseMatcherNode): + slice: Union[ + BaseSliceMatchType, + DoNotCareSentinel, + OneOf[BaseSliceMatchType], + AllOf[BaseSliceMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Subtract(BaseBinaryOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class SubtractAssign(BaseAugOp, BaseMatcherNode): + whitespace_before: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() 
+ whitespace_after: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TrailingWhitespace(BaseMatcherNode): + whitespace: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + comment: Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + AllOf[ + Union[ + Optional["Comment"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Comment]], + ] + ], + ] = DoNotCare() + newline: Union[ + NewlineMatchType, + DoNotCareSentinel, + OneOf[NewlineMatchType], + AllOf[NewlineMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ExceptHandlerMatchType = Union[ + "ExceptHandler", MetadataMatchType, MatchIfTrue[cst.ExceptHandler] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + handlers: Union[ + Sequence[ + Union[ + ExceptHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + AtLeastN[ + Union[ + ExceptHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ExceptHandler]], + OneOf[ + Union[ + Sequence[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + AtLeastN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptHandler]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + AtLeastN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptHandlerMatchType, + OneOf[ExceptHandlerMatchType], + AllOf[ExceptHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptHandler]], + ] + ], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + finalbody: Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + AllOf[ 
+ Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +ExceptStarHandlerMatchType = Union[ + "ExceptStarHandler", MetadataMatchType, MatchIfTrue[cst.ExceptStarHandler] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TryStar(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + handlers: Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + DoNotCareSentinel, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + OneOf[ + Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.ExceptStarHandler]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + AtLeastN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + AtMostN[ + Union[ + ExceptStarHandlerMatchType, + OneOf[ExceptStarHandlerMatchType], + AllOf[ExceptStarHandlerMatchType], + ] + ], + ] + ], + 
MatchIfTrue[Sequence[cst.ExceptStarHandler]], + ] + ], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + finalbody: Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + AllOf[ + Union[ + Optional["Finally"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Finally]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Tuple( + BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode +): + elements: Union[ + Sequence[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + DoNotCareSentinel, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseElement]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + AtLeastN[ + 
Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + AtMostN[ + Union[ + BaseElementMatchType, + OneOf[BaseElementMatchType], + AllOf[BaseElementMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseElement]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeAlias(BaseSmallStatement, BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + value: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + type_parameters: Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + DoNotCareSentinel, + OneOf[ + Union[ + 
Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + AllOf[ + Union[ + Optional["TypeParameters"], + MetadataMatchType, + MatchIfTrue[Optional[cst.TypeParameters]], + ] + ], + ] = DoNotCare() + whitespace_after_type: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_name: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_type_parameters: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_equals: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + semicolon: Union[ + SemicolonMatchType, + DoNotCareSentinel, + OneOf[SemicolonMatchType], + AllOf[SemicolonMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +TypeVarOrTypeVarTupleOrParamSpecMatchType = Union[ + "TypeVar", + "TypeVarTuple", + "ParamSpec", + MetadataMatchType, + MatchIfTrue[Union[cst.TypeVar, cst.TypeVarTuple, cst.ParamSpec]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeParam(BaseMatcherNode): + param: Union[ + TypeVarOrTypeVarTupleOrParamSpecMatchType, + DoNotCareSentinel, + OneOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], + AllOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + equal: Union[ + AssignEqualMatchType, + DoNotCareSentinel, + OneOf[AssignEqualMatchType], + AllOf[AssignEqualMatchType], + ] = DoNotCare() + star: Union[ + Literal["", "*"], + MetadataMatchType, + MatchIfTrue[Literal["", "*"]], + DoNotCareSentinel, + OneOf[ + Union[Literal["", "*"], MetadataMatchType, MatchIfTrue[Literal["", "*"]]] + ], + AllOf[ + Union[Literal["", "*"], MetadataMatchType, MatchIfTrue[Literal["", "*"]]] + ], + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + default: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +TypeParamMatchType = Union["TypeParam", MetadataMatchType, MatchIfTrue[cst.TypeParam]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeParameters(BaseMatcherNode): + params: Union[ + Sequence[ + Union[ + TypeParamMatchType, + DoNotCareSentinel, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + AtLeastN[ + Union[ + TypeParamMatchType, + DoNotCareSentinel, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + AtMostN[ + Union[ + TypeParamMatchType, + DoNotCareSentinel, + OneOf[TypeParamMatchType], + 
AllOf[TypeParamMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.TypeParam]], + OneOf[ + Union[ + Sequence[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + AtLeastN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + AtMostN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.TypeParam]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + AtLeastN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + AtMostN[ + Union[ + TypeParamMatchType, + OneOf[TypeParamMatchType], + AllOf[TypeParamMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.TypeParam]], + ] + ], + ] = DoNotCare() + lbracket: Union[ + LeftSquareBracketMatchType, + DoNotCareSentinel, + OneOf[LeftSquareBracketMatchType], + AllOf[LeftSquareBracketMatchType], + ] = DoNotCare() + rbracket: Union[ + RightSquareBracketMatchType, + DoNotCareSentinel, + OneOf[RightSquareBracketMatchType], + AllOf[RightSquareBracketMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeVar(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + bound: Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + AllOf[ + Union[ + Optional["BaseExpression"], + MetadataMatchType, + MatchIfTrue[Optional[cst.BaseExpression]], + ] + ], + ] = DoNotCare() + colon: Union[ + ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TypeVarTuple(BaseMatcherNode): + name: Union[ + NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] + ] = DoNotCare() + whitespace_after_star: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseUnaryOpMatchType = Union[ + "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class UnaryOperation(BaseExpression, BaseMatcherNode): + operator: Union[ + BaseUnaryOpMatchType, + DoNotCareSentinel, + OneOf[BaseUnaryOpMatchType], + AllOf[BaseUnaryOpMatchType], + ] = DoNotCare() + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + 
LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + test: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + orelse: Union[ + Optional["Else"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Else]], + DoNotCareSentinel, + OneOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + AllOf[ + Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], 
+ ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + whitespace_after_while: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +WithItemMatchType = Union["WithItem", MetadataMatchType, MatchIfTrue[cst.WithItem]] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode): + items: Union[ + Sequence[ + Union[ + WithItemMatchType, + DoNotCareSentinel, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + AtLeastN[ + Union[ + WithItemMatchType, + DoNotCareSentinel, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + AtMostN[ + Union[ + WithItemMatchType, + DoNotCareSentinel, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.WithItem]], + OneOf[ + Union[ + Sequence[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + AtLeastN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + AtMostN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.WithItem]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + AtLeastN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + AtMostN[ + Union[ + WithItemMatchType, + OneOf[WithItemMatchType], + AllOf[WithItemMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.WithItem]], + ] + ], + ] = DoNotCare() + body: Union[ + BaseSuiteMatchType, + DoNotCareSentinel, + OneOf[BaseSuiteMatchType], + AllOf[BaseSuiteMatchType], + ] = DoNotCare() + asynchronous: Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + AllOf[ + Union[ + Optional["Asynchronous"], + MetadataMatchType, + MatchIfTrue[Optional[cst.Asynchronous]], + ] + ], + ] = DoNotCare() + leading_lines: Union[ + Sequence[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + 
DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + DoNotCareSentinel, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.EmptyLine]], + OneOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + AtLeastN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + AtMostN[ + Union[ + EmptyLineMatchType, + OneOf[EmptyLineMatchType], + AllOf[EmptyLineMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.EmptyLine]], + ] + ], + ] = DoNotCare() + lpar: Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] = DoNotCare() + rpar: Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] = DoNotCare() + whitespace_after_with: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + whitespace_before_colon: Union[ + SimpleWhitespaceMatchType, + DoNotCareSentinel, + OneOf[SimpleWhitespaceMatchType], + AllOf[SimpleWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class WithItem(BaseMatcherNode): + item: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + asname: Union[ + Optional["AsName"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AsName]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + AllOf[ + Union[ + Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]] + ] + ], + ] = DoNotCare() + comma: Union[ + CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +BaseExpressionOrFromOrNoneMatchType = Union[ + "BaseExpression", + "From", + None, + MetadataMatchType, + MatchIfTrue[Union[cst.BaseExpression, cst.From, None]], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class Yield(BaseExpression, BaseMatcherNode): + value: Union[ + BaseExpressionOrFromOrNoneMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionOrFromOrNoneMatchType], + AllOf[BaseExpressionOrFromOrNoneMatchType], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + 
DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + whitespace_after_yield: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +__all__ = [ + "Add", + "AddAssign", + "AllOf", + "And", + "AnnAssign", + "Annotation", + "Arg", + "AsName", + "Assert", + "Assign", + "AssignEqual", + "AssignTarget", + "Asynchronous", + "AtLeastN", + "AtMostN", + "Attribute", + "AugAssign", + "Await", + "BaseAssignTargetExpression", + "BaseAugOp", + "BaseBinaryOp", + "BaseBooleanOp", + "BaseComp", + "BaseCompOp", + "BaseCompoundStatement", + "BaseDelTargetExpression", + "BaseDict", + "BaseDictElement", + "BaseElement", + "BaseExpression", + "BaseFormattedStringContent", + "BaseList", + "BaseMatcherNode", + "BaseMetadataProvider", + "BaseNumber", + "BaseParenthesizableWhitespace", + "BaseSet", + "BaseSimpleComp", + "BaseSlice", + "BaseSmallStatement", + "BaseStatement", + "BaseString", + "BaseSuite", + "BaseUnaryOp", + "BinaryOperation", + "BitAnd", + "BitAndAssign", + "BitInvert", + "BitOr", + "BitOrAssign", + "BitXor", + "BitXorAssign", + "BooleanOperation", + "Break", + "Call", + "ClassDef", + "Colon", + "Comma", + "Comment", + "CompFor", + "CompIf", + 
"Comparison", + "ComparisonTarget", + "ConcatenatedString", + "Continue", + "Decorator", + "Del", + "Dict", + "DictComp", + "DictElement", + "Divide", + "DivideAssign", + "DoNotCare", + "DoNotCareSentinel", + "DoesNotMatch", + "Dot", + "Element", + "Ellipsis", + "Else", + "EmptyLine", + "Equal", + "ExceptHandler", + "ExceptStarHandler", + "Expr", + "Finally", + "Float", + "FloorDivide", + "FloorDivideAssign", + "For", + "FormattedString", + "FormattedStringExpression", + "FormattedStringText", + "From", + "FunctionDef", + "GeneratorExp", + "Global", + "GreaterThan", + "GreaterThanEqual", + "If", + "IfExp", + "Imaginary", + "Import", + "ImportAlias", + "ImportFrom", + "ImportStar", + "In", + "IndentedBlock", + "Index", + "Integer", + "Is", + "IsNot", + "Lambda", + "LeftCurlyBrace", + "LeftParen", + "LeftShift", + "LeftShiftAssign", + "LeftSquareBracket", + "LessThan", + "LessThanEqual", + "List", + "ListComp", + "Match", + "MatchAs", + "MatchCase", + "MatchClass", + "MatchDecoratorMismatch", + "MatchIfTrue", + "MatchKeywordElement", + "MatchList", + "MatchMapping", + "MatchMappingElement", + "MatchMetadata", + "MatchMetadataIfTrue", + "MatchOr", + "MatchOrElement", + "MatchPattern", + "MatchRegex", + "MatchSequence", + "MatchSequenceElement", + "MatchSingleton", + "MatchStar", + "MatchTuple", + "MatchValue", + "MatcherDecoratableTransformer", + "MatcherDecoratableVisitor", + "MatrixMultiply", + "MatrixMultiplyAssign", + "Minus", + "Module", + "Modulo", + "ModuloAssign", + "Multiply", + "MultiplyAssign", + "Name", + "NameItem", + "NamedExpr", + "Newline", + "Nonlocal", + "Not", + "NotEqual", + "NotIn", + "OneOf", + "Or", + "Param", + "ParamSlash", + "ParamSpec", + "ParamStar", + "Parameters", + "ParenthesizedWhitespace", + "Pass", + "Plus", + "Power", + "PowerAssign", + "Raise", + "Return", + "RightCurlyBrace", + "RightParen", + "RightShift", + "RightShiftAssign", + "RightSquareBracket", + "SaveMatchedNode", + "Semicolon", + "Set", + "SetComp", + "SimpleStatementLine", + "SimpleStatementSuite", + "SimpleString", + "SimpleWhitespace", + "Slice", + "StarredDictElement", + "StarredElement", + "Subscript", + "SubscriptElement", + "Subtract", + "SubtractAssign", + "TrailingWhitespace", + "Try", + "TryStar", + "Tuple", + "TypeAlias", + "TypeOf", + "TypeParam", + "TypeParameters", + "TypeVar", + "TypeVarTuple", + "UnaryOperation", + "While", + "With", + "WithItem", + "Yield", + "ZeroOrMore", + "ZeroOrOne", + "call_if_inside", + "call_if_not_inside", + "extract", + "extractall", + "findall", + "leave", + "matches", + "replace", + "visit", +] diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 9d20a23a..d49e009c 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -1,375 +1,375 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
- - -# This file was generated by libcst.codegen.gen_type_mapping -from typing import Dict as TypingDict, Type, Union - -from libcst._maybe_sentinel import MaybeSentinel -from libcst._nodes.base import CSTNode -from libcst._nodes.expression import ( - Annotation, - Arg, - Asynchronous, - Attribute, - Await, - BaseDictElement, - BaseElement, - BaseExpression, - BaseFormattedStringContent, - BaseSlice, - BinaryOperation, - BooleanOperation, - Call, - Comparison, - ComparisonTarget, - CompFor, - CompIf, - ConcatenatedString, - Dict, - DictComp, - DictElement, - Element, - Ellipsis, - Float, - FormattedString, - FormattedStringExpression, - FormattedStringText, - From, - GeneratorExp, - IfExp, - Imaginary, - Index, - Integer, - Lambda, - LeftCurlyBrace, - LeftParen, - LeftSquareBracket, - List, - ListComp, - Name, - NamedExpr, - Param, - Parameters, - ParamSlash, - ParamStar, - RightCurlyBrace, - RightParen, - RightSquareBracket, - Set, - SetComp, - SimpleString, - Slice, - StarredDictElement, - StarredElement, - Subscript, - SubscriptElement, - Tuple, - UnaryOperation, - Yield, -) -from libcst._nodes.module import Module - -from libcst._nodes.op import ( - Add, - AddAssign, - And, - AssignEqual, - BaseAugOp, - BaseBinaryOp, - BaseBooleanOp, - BaseCompOp, - BaseUnaryOp, - BitAnd, - BitAndAssign, - BitInvert, - BitOr, - BitOrAssign, - BitXor, - BitXorAssign, - Colon, - Comma, - Divide, - DivideAssign, - Dot, - Equal, - FloorDivide, - FloorDivideAssign, - GreaterThan, - GreaterThanEqual, - ImportStar, - In, - Is, - IsNot, - LeftShift, - LeftShiftAssign, - LessThan, - LessThanEqual, - MatrixMultiply, - MatrixMultiplyAssign, - Minus, - Modulo, - ModuloAssign, - Multiply, - MultiplyAssign, - Not, - NotEqual, - NotIn, - Or, - Plus, - Power, - PowerAssign, - RightShift, - RightShiftAssign, - Semicolon, - Subtract, - SubtractAssign, -) -from libcst._nodes.statement import ( - AnnAssign, - AsName, - Assert, - Assign, - AssignTarget, - AugAssign, - BaseSmallStatement, - BaseStatement, - BaseSuite, - Break, - ClassDef, - Continue, - Decorator, - Del, - Else, - ExceptHandler, - ExceptStarHandler, - Expr, - Finally, - For, - FunctionDef, - Global, - If, - Import, - ImportAlias, - ImportFrom, - IndentedBlock, - Match, - MatchAs, - MatchCase, - MatchClass, - MatchKeywordElement, - MatchList, - MatchMapping, - MatchMappingElement, - MatchOr, - MatchOrElement, - MatchPattern, - MatchSequence, - MatchSequenceElement, - MatchSingleton, - MatchStar, - MatchTuple, - MatchValue, - NameItem, - Nonlocal, - ParamSpec, - Pass, - Raise, - Return, - SimpleStatementLine, - SimpleStatementSuite, - Try, - TryStar, - TypeAlias, - TypeParam, - TypeParameters, - TypeVar, - TypeVarTuple, - While, - With, - WithItem, -) -from libcst._nodes.whitespace import ( - BaseParenthesizableWhitespace, - Comment, - EmptyLine, - Newline, - ParenthesizedWhitespace, - SimpleWhitespace, - TrailingWhitespace, -) -from libcst._removal_sentinel import RemovalSentinel - - -TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { - Add: BaseBinaryOp, - AddAssign: BaseAugOp, - And: BaseBooleanOp, - AnnAssign: Union[BaseSmallStatement, RemovalSentinel], - Annotation: Annotation, - Arg: Union[Arg, RemovalSentinel], - AsName: AsName, - Assert: Union[BaseSmallStatement, RemovalSentinel], - Assign: Union[BaseSmallStatement, RemovalSentinel], - AssignEqual: Union[AssignEqual, MaybeSentinel], - AssignTarget: Union[AssignTarget, RemovalSentinel], - Asynchronous: Asynchronous, - Attribute: BaseExpression, - AugAssign: Union[BaseSmallStatement, 
RemovalSentinel], - Await: BaseExpression, - BinaryOperation: BaseExpression, - BitAnd: BaseBinaryOp, - BitAndAssign: BaseAugOp, - BitInvert: BaseUnaryOp, - BitOr: Union[BaseBinaryOp, MaybeSentinel], - BitOrAssign: BaseAugOp, - BitXor: BaseBinaryOp, - BitXorAssign: BaseAugOp, - BooleanOperation: BaseExpression, - Break: Union[BaseSmallStatement, RemovalSentinel], - Call: BaseExpression, - ClassDef: Union[BaseStatement, RemovalSentinel], - Colon: Union[Colon, MaybeSentinel], - Comma: Union[Comma, MaybeSentinel], - Comment: Comment, - CompFor: CompFor, - CompIf: CompIf, - Comparison: BaseExpression, - ComparisonTarget: Union[ComparisonTarget, RemovalSentinel], - ConcatenatedString: BaseExpression, - Continue: Union[BaseSmallStatement, RemovalSentinel], - Decorator: Union[Decorator, RemovalSentinel], - Del: Union[BaseSmallStatement, RemovalSentinel], - Dict: BaseExpression, - DictComp: BaseExpression, - DictElement: Union[BaseDictElement, RemovalSentinel], - Divide: BaseBinaryOp, - DivideAssign: BaseAugOp, - Dot: Union[Dot, RemovalSentinel], - Element: Union[BaseElement, RemovalSentinel], - Ellipsis: BaseExpression, - Else: Else, - EmptyLine: Union[EmptyLine, RemovalSentinel], - Equal: BaseCompOp, - ExceptHandler: Union[ExceptHandler, RemovalSentinel], - ExceptStarHandler: Union[ExceptStarHandler, RemovalSentinel], - Expr: Union[BaseSmallStatement, RemovalSentinel], - Finally: Finally, - Float: BaseExpression, - FloorDivide: BaseBinaryOp, - FloorDivideAssign: BaseAugOp, - For: Union[BaseStatement, RemovalSentinel], - FormattedString: BaseExpression, - FormattedStringExpression: Union[BaseFormattedStringContent, RemovalSentinel], - FormattedStringText: Union[BaseFormattedStringContent, RemovalSentinel], - From: From, - FunctionDef: Union[BaseStatement, RemovalSentinel], - GeneratorExp: BaseExpression, - Global: Union[BaseSmallStatement, RemovalSentinel], - GreaterThan: BaseCompOp, - GreaterThanEqual: BaseCompOp, - If: Union[BaseStatement, RemovalSentinel], - IfExp: BaseExpression, - Imaginary: BaseExpression, - Import: Union[BaseSmallStatement, RemovalSentinel], - ImportAlias: Union[ImportAlias, RemovalSentinel], - ImportFrom: Union[BaseSmallStatement, RemovalSentinel], - ImportStar: ImportStar, - In: BaseCompOp, - IndentedBlock: BaseSuite, - Index: BaseSlice, - Integer: BaseExpression, - Is: BaseCompOp, - IsNot: BaseCompOp, - Lambda: BaseExpression, - LeftCurlyBrace: LeftCurlyBrace, - LeftParen: Union[LeftParen, MaybeSentinel, RemovalSentinel], - LeftShift: BaseBinaryOp, - LeftShiftAssign: BaseAugOp, - LeftSquareBracket: LeftSquareBracket, - LessThan: BaseCompOp, - LessThanEqual: BaseCompOp, - List: BaseExpression, - ListComp: BaseExpression, - Match: Union[BaseStatement, RemovalSentinel], - MatchAs: MatchPattern, - MatchCase: MatchCase, - MatchClass: MatchPattern, - MatchKeywordElement: Union[MatchKeywordElement, RemovalSentinel], - MatchList: MatchPattern, - MatchMapping: MatchPattern, - MatchMappingElement: Union[MatchMappingElement, RemovalSentinel], - MatchOr: MatchPattern, - MatchOrElement: Union[MatchOrElement, RemovalSentinel], - MatchPattern: MatchPattern, - MatchSequence: MatchPattern, - MatchSequenceElement: Union[MatchSequenceElement, RemovalSentinel], - MatchSingleton: MatchPattern, - MatchStar: MatchStar, - MatchTuple: MatchPattern, - MatchValue: MatchPattern, - MatrixMultiply: BaseBinaryOp, - MatrixMultiplyAssign: BaseAugOp, - Minus: BaseUnaryOp, - Module: Module, - Modulo: BaseBinaryOp, - ModuloAssign: BaseAugOp, - Multiply: BaseBinaryOp, - MultiplyAssign: BaseAugOp, - Name: 
BaseExpression, - NameItem: Union[NameItem, RemovalSentinel], - NamedExpr: BaseExpression, - Newline: Newline, - Nonlocal: Union[BaseSmallStatement, RemovalSentinel], - Not: BaseUnaryOp, - NotEqual: BaseCompOp, - NotIn: BaseCompOp, - Or: BaseBooleanOp, - Param: Union[Param, MaybeSentinel, RemovalSentinel], - ParamSlash: Union[ParamSlash, MaybeSentinel], - ParamSpec: ParamSpec, - ParamStar: Union[ParamStar, MaybeSentinel], - Parameters: Parameters, - ParenthesizedWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], - Pass: Union[BaseSmallStatement, RemovalSentinel], - Plus: BaseUnaryOp, - Power: BaseBinaryOp, - PowerAssign: BaseAugOp, - Raise: Union[BaseSmallStatement, RemovalSentinel], - Return: Union[BaseSmallStatement, RemovalSentinel], - RightCurlyBrace: RightCurlyBrace, - RightParen: Union[RightParen, MaybeSentinel, RemovalSentinel], - RightShift: BaseBinaryOp, - RightShiftAssign: BaseAugOp, - RightSquareBracket: RightSquareBracket, - Semicolon: Union[Semicolon, MaybeSentinel], - Set: BaseExpression, - SetComp: BaseExpression, - SimpleStatementLine: Union[BaseStatement, RemovalSentinel], - SimpleStatementSuite: BaseSuite, - SimpleString: BaseExpression, - SimpleWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], - Slice: BaseSlice, - StarredDictElement: Union[BaseDictElement, RemovalSentinel], - StarredElement: BaseExpression, - Subscript: BaseExpression, - SubscriptElement: Union[SubscriptElement, RemovalSentinel], - Subtract: BaseBinaryOp, - SubtractAssign: BaseAugOp, - TrailingWhitespace: TrailingWhitespace, - Try: Union[BaseStatement, RemovalSentinel], - TryStar: Union[BaseStatement, RemovalSentinel], - Tuple: BaseExpression, - TypeAlias: Union[BaseSmallStatement, RemovalSentinel], - TypeParam: Union[TypeParam, RemovalSentinel], - TypeParameters: TypeParameters, - TypeVar: TypeVar, - TypeVarTuple: TypeVarTuple, - UnaryOperation: BaseExpression, - While: Union[BaseStatement, RemovalSentinel], - With: Union[BaseStatement, RemovalSentinel], - WithItem: Union[WithItem, RemovalSentinel], - Yield: BaseExpression, -} +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ + +# This file was generated by libcst.codegen.gen_type_mapping +from typing import Dict as TypingDict, Type, Union + +from libcst._maybe_sentinel import MaybeSentinel +from libcst._nodes.base import CSTNode +from libcst._nodes.expression import ( + Annotation, + Arg, + Asynchronous, + Attribute, + Await, + BaseDictElement, + BaseElement, + BaseExpression, + BaseFormattedStringContent, + BaseSlice, + BinaryOperation, + BooleanOperation, + Call, + Comparison, + ComparisonTarget, + CompFor, + CompIf, + ConcatenatedString, + Dict, + DictComp, + DictElement, + Element, + Ellipsis, + Float, + FormattedString, + FormattedStringExpression, + FormattedStringText, + From, + GeneratorExp, + IfExp, + Imaginary, + Index, + Integer, + Lambda, + LeftCurlyBrace, + LeftParen, + LeftSquareBracket, + List, + ListComp, + Name, + NamedExpr, + Param, + Parameters, + ParamSlash, + ParamStar, + RightCurlyBrace, + RightParen, + RightSquareBracket, + Set, + SetComp, + SimpleString, + Slice, + StarredDictElement, + StarredElement, + Subscript, + SubscriptElement, + Tuple, + UnaryOperation, + Yield, +) +from libcst._nodes.module import Module + +from libcst._nodes.op import ( + Add, + AddAssign, + And, + AssignEqual, + BaseAugOp, + BaseBinaryOp, + BaseBooleanOp, + BaseCompOp, + BaseUnaryOp, + BitAnd, + BitAndAssign, + BitInvert, + BitOr, + BitOrAssign, + BitXor, + BitXorAssign, + Colon, + Comma, + Divide, + DivideAssign, + Dot, + Equal, + FloorDivide, + FloorDivideAssign, + GreaterThan, + GreaterThanEqual, + ImportStar, + In, + Is, + IsNot, + LeftShift, + LeftShiftAssign, + LessThan, + LessThanEqual, + MatrixMultiply, + MatrixMultiplyAssign, + Minus, + Modulo, + ModuloAssign, + Multiply, + MultiplyAssign, + Not, + NotEqual, + NotIn, + Or, + Plus, + Power, + PowerAssign, + RightShift, + RightShiftAssign, + Semicolon, + Subtract, + SubtractAssign, +) +from libcst._nodes.statement import ( + AnnAssign, + AsName, + Assert, + Assign, + AssignTarget, + AugAssign, + BaseSmallStatement, + BaseStatement, + BaseSuite, + Break, + ClassDef, + Continue, + Decorator, + Del, + Else, + ExceptHandler, + ExceptStarHandler, + Expr, + Finally, + For, + FunctionDef, + Global, + If, + Import, + ImportAlias, + ImportFrom, + IndentedBlock, + Match, + MatchAs, + MatchCase, + MatchClass, + MatchKeywordElement, + MatchList, + MatchMapping, + MatchMappingElement, + MatchOr, + MatchOrElement, + MatchPattern, + MatchSequence, + MatchSequenceElement, + MatchSingleton, + MatchStar, + MatchTuple, + MatchValue, + NameItem, + Nonlocal, + ParamSpec, + Pass, + Raise, + Return, + SimpleStatementLine, + SimpleStatementSuite, + Try, + TryStar, + TypeAlias, + TypeParam, + TypeParameters, + TypeVar, + TypeVarTuple, + While, + With, + WithItem, +) +from libcst._nodes.whitespace import ( + BaseParenthesizableWhitespace, + Comment, + EmptyLine, + Newline, + ParenthesizedWhitespace, + SimpleWhitespace, + TrailingWhitespace, +) +from libcst._removal_sentinel import RemovalSentinel + + +TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { + Add: BaseBinaryOp, + AddAssign: BaseAugOp, + And: BaseBooleanOp, + AnnAssign: Union[BaseSmallStatement, RemovalSentinel], + Annotation: Annotation, + Arg: Union[Arg, RemovalSentinel], + AsName: AsName, + Assert: Union[BaseSmallStatement, RemovalSentinel], + Assign: Union[BaseSmallStatement, RemovalSentinel], + AssignEqual: Union[AssignEqual, MaybeSentinel], + AssignTarget: Union[AssignTarget, RemovalSentinel], + Asynchronous: Asynchronous, + Attribute: BaseExpression, + AugAssign: Union[BaseSmallStatement, 
RemovalSentinel], + Await: BaseExpression, + BinaryOperation: BaseExpression, + BitAnd: BaseBinaryOp, + BitAndAssign: BaseAugOp, + BitInvert: BaseUnaryOp, + BitOr: Union[BaseBinaryOp, MaybeSentinel], + BitOrAssign: BaseAugOp, + BitXor: BaseBinaryOp, + BitXorAssign: BaseAugOp, + BooleanOperation: BaseExpression, + Break: Union[BaseSmallStatement, RemovalSentinel], + Call: BaseExpression, + ClassDef: Union[BaseStatement, RemovalSentinel], + Colon: Union[Colon, MaybeSentinel], + Comma: Union[Comma, MaybeSentinel], + Comment: Comment, + CompFor: CompFor, + CompIf: CompIf, + Comparison: BaseExpression, + ComparisonTarget: Union[ComparisonTarget, RemovalSentinel], + ConcatenatedString: BaseExpression, + Continue: Union[BaseSmallStatement, RemovalSentinel], + Decorator: Union[Decorator, RemovalSentinel], + Del: Union[BaseSmallStatement, RemovalSentinel], + Dict: BaseExpression, + DictComp: BaseExpression, + DictElement: Union[BaseDictElement, RemovalSentinel], + Divide: BaseBinaryOp, + DivideAssign: BaseAugOp, + Dot: Union[Dot, RemovalSentinel], + Element: Union[BaseElement, RemovalSentinel], + Ellipsis: BaseExpression, + Else: Else, + EmptyLine: Union[EmptyLine, RemovalSentinel], + Equal: BaseCompOp, + ExceptHandler: Union[ExceptHandler, RemovalSentinel], + ExceptStarHandler: Union[ExceptStarHandler, RemovalSentinel], + Expr: Union[BaseSmallStatement, RemovalSentinel], + Finally: Finally, + Float: BaseExpression, + FloorDivide: BaseBinaryOp, + FloorDivideAssign: BaseAugOp, + For: Union[BaseStatement, RemovalSentinel], + FormattedString: BaseExpression, + FormattedStringExpression: Union[BaseFormattedStringContent, RemovalSentinel], + FormattedStringText: Union[BaseFormattedStringContent, RemovalSentinel], + From: From, + FunctionDef: Union[BaseStatement, RemovalSentinel], + GeneratorExp: BaseExpression, + Global: Union[BaseSmallStatement, RemovalSentinel], + GreaterThan: BaseCompOp, + GreaterThanEqual: BaseCompOp, + If: Union[BaseStatement, RemovalSentinel], + IfExp: BaseExpression, + Imaginary: BaseExpression, + Import: Union[BaseSmallStatement, RemovalSentinel], + ImportAlias: Union[ImportAlias, RemovalSentinel], + ImportFrom: Union[BaseSmallStatement, RemovalSentinel], + ImportStar: ImportStar, + In: BaseCompOp, + IndentedBlock: BaseSuite, + Index: BaseSlice, + Integer: BaseExpression, + Is: BaseCompOp, + IsNot: BaseCompOp, + Lambda: BaseExpression, + LeftCurlyBrace: LeftCurlyBrace, + LeftParen: Union[LeftParen, MaybeSentinel, RemovalSentinel], + LeftShift: BaseBinaryOp, + LeftShiftAssign: BaseAugOp, + LeftSquareBracket: LeftSquareBracket, + LessThan: BaseCompOp, + LessThanEqual: BaseCompOp, + List: BaseExpression, + ListComp: BaseExpression, + Match: Union[BaseStatement, RemovalSentinel], + MatchAs: MatchPattern, + MatchCase: MatchCase, + MatchClass: MatchPattern, + MatchKeywordElement: Union[MatchKeywordElement, RemovalSentinel], + MatchList: MatchPattern, + MatchMapping: MatchPattern, + MatchMappingElement: Union[MatchMappingElement, RemovalSentinel], + MatchOr: MatchPattern, + MatchOrElement: Union[MatchOrElement, RemovalSentinel], + MatchPattern: MatchPattern, + MatchSequence: MatchPattern, + MatchSequenceElement: Union[MatchSequenceElement, RemovalSentinel], + MatchSingleton: MatchPattern, + MatchStar: MatchStar, + MatchTuple: MatchPattern, + MatchValue: MatchPattern, + MatrixMultiply: BaseBinaryOp, + MatrixMultiplyAssign: BaseAugOp, + Minus: BaseUnaryOp, + Module: Module, + Modulo: BaseBinaryOp, + ModuloAssign: BaseAugOp, + Multiply: BaseBinaryOp, + MultiplyAssign: BaseAugOp, + Name: 
BaseExpression, + NameItem: Union[NameItem, RemovalSentinel], + NamedExpr: BaseExpression, + Newline: Newline, + Nonlocal: Union[BaseSmallStatement, RemovalSentinel], + Not: BaseUnaryOp, + NotEqual: BaseCompOp, + NotIn: BaseCompOp, + Or: BaseBooleanOp, + Param: Union[Param, MaybeSentinel, RemovalSentinel], + ParamSlash: Union[ParamSlash, MaybeSentinel], + ParamSpec: ParamSpec, + ParamStar: Union[ParamStar, MaybeSentinel], + Parameters: Parameters, + ParenthesizedWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], + Pass: Union[BaseSmallStatement, RemovalSentinel], + Plus: BaseUnaryOp, + Power: BaseBinaryOp, + PowerAssign: BaseAugOp, + Raise: Union[BaseSmallStatement, RemovalSentinel], + Return: Union[BaseSmallStatement, RemovalSentinel], + RightCurlyBrace: RightCurlyBrace, + RightParen: Union[RightParen, MaybeSentinel, RemovalSentinel], + RightShift: BaseBinaryOp, + RightShiftAssign: BaseAugOp, + RightSquareBracket: RightSquareBracket, + Semicolon: Union[Semicolon, MaybeSentinel], + Set: BaseExpression, + SetComp: BaseExpression, + SimpleStatementLine: Union[BaseStatement, RemovalSentinel], + SimpleStatementSuite: BaseSuite, + SimpleString: BaseExpression, + SimpleWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], + Slice: BaseSlice, + StarredDictElement: Union[BaseDictElement, RemovalSentinel], + StarredElement: BaseExpression, + Subscript: BaseExpression, + SubscriptElement: Union[SubscriptElement, RemovalSentinel], + Subtract: BaseBinaryOp, + SubtractAssign: BaseAugOp, + TrailingWhitespace: TrailingWhitespace, + Try: Union[BaseStatement, RemovalSentinel], + TryStar: Union[BaseStatement, RemovalSentinel], + Tuple: BaseExpression, + TypeAlias: Union[BaseSmallStatement, RemovalSentinel], + TypeParam: Union[TypeParam, RemovalSentinel], + TypeParameters: TypeParameters, + TypeVar: TypeVar, + TypeVarTuple: TypeVarTuple, + UnaryOperation: BaseExpression, + While: Union[BaseStatement, RemovalSentinel], + With: Union[BaseStatement, RemovalSentinel], + WithItem: Union[WithItem, RemovalSentinel], + Yield: BaseExpression, +} diff --git a/libcst/tests/test_roundtrip.py b/libcst/tests/test_roundtrip.py index 010d840d..e3a7a35b 100644 --- a/libcst/tests/test_roundtrip.py +++ b/libcst/tests/test_roundtrip.py @@ -23,4 +23,5 @@ class RoundTripTests(TestCase): with self.subTest(file=str(file)): src = file.read_text(encoding="utf-8") mod = parse_module(src) + self.maxDiff = None self.assertEqual(mod.code, src) diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index 43bb6886..d40ef340 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -3476,21 +3476,44 @@ pub enum TypeVarLike<'a> { pub struct TypeParam<'a> { pub param: TypeVarLike<'a>, pub comma: Option>, + pub equal: Option>, + pub star: &'a str, + pub whitespace_after_star: SimpleWhitespace<'a>, + pub default: Option>, + pub star_tok: Option>, } impl<'a> Codegen<'a> for TypeParam<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { self.param.codegen(state); + self.equal.codegen(state); + state.add_token(self.star); + self.whitespace_after_star.codegen(state); + self.default.codegen(state); self.comma.codegen(state); } } impl<'r, 'a> Inflate<'a> for DeflatedTypeParam<'r, 'a> { type Inflated = TypeParam<'a>; - fn inflate(self, config: &Config<'a>) -> Result { + fn inflate(mut self, config: &Config<'a>) -> Result { + let whitespace_after_star = if let Some(star_tok) = self.star_tok.as_mut() { + parse_simple_whitespace(config, &mut 
star_tok.whitespace_after.borrow_mut())? + } else { + Default::default() + }; let param = self.param.inflate(config)?; + let equal = self.equal.inflate(config)?; + let default = self.default.inflate(config)?; let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { param, comma }) + Ok(Self::Inflated { + param, + comma, + equal, + star: self.star, + whitespace_after_star, + default, + }) } } diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 71ea86e7..30a4b9bb 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -448,11 +448,20 @@ parser! { make_annotation(col, e) } - rule default() -> (AssignEqual<'input, 'a>, Expression<'input, 'a>) + rule default() -> (AssignEqual<'input, 'a>, Expression<'input, 'a>) = eq:lit("=") ex:expression() { (make_assign_equal(eq), ex) } + rule default_or_starred() -> (AssignEqual<'input, 'a>,Option>, Expression<'input, 'a>) + = eq:lit("=") ex:expression() { + (make_assign_equal(eq), None , ex) + } + / eq:lit("=") star:lit("*") ex:expression() { + // make_star_default(eq, star, ex) + (make_assign_equal(eq), Some(star) , ex) + } + // If statement rule if_stmt() -> If<'input, 'a> @@ -792,9 +801,10 @@ parser! { } rule type_param() -> TypeParam<'input, 'a> - = n:name() b:type_param_bound()? { make_type_var(n, b) } - / s:lit("*") n:name() { make_type_var_tuple(s, n) } - / s:lit("**") n:name() { make_param_spec(s, n) } + = n:name() b:type_param_bound()? def:default()? { make_type_var(n, b, def) } + / s:lit("*") n:name() def:default_or_starred()? { make_type_var_tuple(s, n, def) } + / s:lit("**") n:name() def:default()? { make_param_spec(s, n, def) } + rule type_param_bound() -> TypeParamBound<'input, 'a> = c:lit(":") e:expression() { make_type_param_bound(c, e) } @@ -3378,34 +3388,70 @@ fn make_type_param_bound<'input, 'a>( fn make_param_spec<'input, 'a>( star_tok: TokenRef<'input, 'a>, name: Name<'input, 'a>, + def: Option<(AssignEqual<'input, 'a>, Expression<'input, 'a>)>, ) -> TypeParam<'input, 'a> { + let (equal, default) = match def { + Some((a, b)) => (Some(a), Some(b)), + None => (None, None), + }; TypeParam { param: TypeVarLike::ParamSpec(ParamSpec { name, star_tok }), comma: Default::default(), + equal: equal, + star: "", + default: default, + star_tok: None, } } fn make_type_var_tuple<'input, 'a>( star_tok: TokenRef<'input, 'a>, name: Name<'input, 'a>, + def: Option<( + AssignEqual<'input, 'a>, + Option>, + Expression<'input, 'a>, + )>, ) -> TypeParam<'input, 'a> { + let (equal, default_star, default) = match def { + Some((a, b, c)) => (Some(a), b, Some(c)), + None => (None, None, None), + }; + let star = match default_star { + Some(a) => a.string, + None => "", + }; + TypeParam { param: TypeVarLike::TypeVarTuple(TypeVarTuple { name, star_tok }), comma: Default::default(), + equal: equal, + star: star, + default: default, + star_tok: default_star, } } fn make_type_var<'input, 'a>( name: Name<'input, 'a>, bound: Option>, + def: Option<(AssignEqual<'input, 'a>, Expression<'input, 'a>)>, ) -> TypeParam<'input, 'a> { let (bound, colon) = match bound { Some(TypeParamBound(c, e)) => (Some(Box::new(e)), Some(make_colon(c))), _ => (None, None), }; + let (equal, default) = match def { + Some((a, b)) => (Some(a), Some(b)), + None => (None, None), + }; TypeParam { param: TypeVarLike::TypeVar(TypeVar { name, bound, colon }), comma: Default::default(), + equal: equal, + star: "", + default: default, + star_tok: None, } } diff --git a/native/libcst/tests/fixtures/type_parameters.py 
b/native/libcst/tests/fixtures/type_parameters.py index e5329f01..ef6a39eb 100644 --- a/native/libcst/tests/fixtures/type_parameters.py +++ b/native/libcst/tests/fixtures/type_parameters.py @@ -54,4 +54,16 @@ type A [ T , * V ] =foo;type B=A def AAAAAAAAAAAAAAAAAA [ T : int ,*Ts , ** TT ] ():pass class AAAAAAAAAAAAAAAAAA [ T : int ,*Ts , ** TT ] :pass -def yikes[A:int,*B,**C](*d:*tuple[A,*B,...])->A:pass \ No newline at end of file +def yikes[A:int,*B,**C](*d:*tuple[A,*B,...])->A:pass + +def func[T=int, **U=float, *V=None](): pass + +class C[T=int, **U=float, *V=None]: pass + +type Alias[T = int, **U = float, *V = None] = int + +default = tuple[int, str] +type Alias[*Ts = *default] = Ts +type Foo[ * T = * default ] = int +type Foo[*T=*default ]=int +type Foo [ * T = * default ] = int \ No newline at end of file From db696e63489124ff3d2206b708c50d65d6374dbe Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 21 May 2024 15:52:49 -0400 Subject: [PATCH 479/632] fix: don't reset context.scratch between files (#1151) #453 fixed scratch leaking between files by setting it to empty, but that drops all the scratch space that was set up before the codemod runs (e.g. in the transformer's constructor) This PR improves the fix by preserving the initial scratch. --- libcst/codemod/_cli.py | 5 ++++- libcst/tests/test_e2e.py | 9 ++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 3958ef05..8bfc11f8 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -14,6 +14,7 @@ import subprocess import sys import time import traceback +from copy import deepcopy from dataclasses import dataclass, replace from multiprocessing import cpu_count, Pool from pathlib import Path @@ -214,6 +215,7 @@ def _execute_transform( # noqa: C901 transformer: Codemod, filename: str, config: ExecutionConfig, + scratch: Dict[str, object], ) -> ExecutionResult: for pattern in config.blacklist_patterns: if re.fullmatch(pattern, filename): @@ -251,7 +253,7 @@ def _execute_transform( # noqa: C901 transformer.context = replace( transformer.context, filename=filename, - scratch={}, + scratch=deepcopy(scratch), ) # determine the module and package name for this file @@ -634,6 +636,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 "transformer": transform, "filename": filename, "config": config, + "scratch": transform.context.scratch, } for filename in files ] diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py index 219192fb..6c106b36 100644 --- a/libcst/tests/test_e2e.py +++ b/libcst/tests/test_e2e.py @@ -2,7 +2,7 @@ import contextlib import os from pathlib import Path from tempfile import TemporaryDirectory -from typing import Generator +from typing import Dict, Generator from unittest import TestCase from libcst import BaseExpression, Call, matchers as m, Name @@ -16,7 +16,14 @@ from libcst.codemod.visitors import AddImportsVisitor class PrintToPPrintCommand(VisitorBasedCodemodCommand): + def __init__(self, context: CodemodContext, **kwargs: Dict[str, object]) -> None: + super().__init__(context, **kwargs) + self.context.scratch["PPRINT_WAS_HERE"] = True + def leave_Call(self, original_node: Call, updated_node: Call) -> BaseExpression: + if not self.context.scratch["PPRINT_WAS_HERE"]: + raise AssertionError("Scratch space lost") + if m.matches(updated_node, m.Call(func=m.Name("print"))): AddImportsVisitor.add_needed_import( self.context, From 7b9907a56041cd524c2b09f9eed19a3f15d423af Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 May 2024 08:41:05 -0400 Subject: [PATCH 480/632] Bump pypa/cibuildwheel from 2.17.0 to 2.18.0 (#1145) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.17.0 to 2.18.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.17.0...v2.18.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f3ed96ae..b60e6b9a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.17.0 + uses: pypa/cibuildwheel@v2.18.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 96f53416e3f4bb595d78dc645f8601f280a9d386 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 22 May 2024 10:20:33 -0400 Subject: [PATCH 481/632] Bump version to 1.4.0 (#1152) --- CHANGELOG.md | 25 +++++++++++++++++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 30 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f5407c0..1bccbc23 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,28 @@ +# 1.4.0 - 2024-05-22 + +## Fixed +* Fix Literal parse error in RemoveImportsVisitor by @camillol in https://github.com/Instagram/LibCST/pull/1130 +* Don't reset context.scratch between files by @zsol in https://github.com/Instagram/LibCST/pull/1151 +* Various documentation fixes + * Typo fix FullRepoManager by @kit1980 in https://github.com/Instagram/LibCST/pull/1138 + * ✏️ Fix tiny typo in `docs/source/metadata.rst` by @tiangolo in https://github.com/Instagram/LibCST/pull/1134 + * ✏️ Fix typo in `docs/source/scope_tutorial.ipynb` by @tiangolo in https://github.com/Instagram/LibCST/pull/1135 + * Update CONTRIBUTING.md by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1142 + +## Added + +* Add helper functions for common ways of filtering nodes by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1137 +* Dump CST to .dot (graphviz) files by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1147 +* Implement PEP-696 by @thereversiblewheel in https://github.com/Instagram/LibCST/pull/1141 + +## New Contributors +* @tiangolo made their first contribution in https://github.com/Instagram/LibCST/pull/1134 +* @camillol made their first contribution in https://github.com/Instagram/LibCST/pull/1130 +* @zaicruvoir1rominet made their first contribution in https://github.com/Instagram/LibCST/pull/1142 +* @thereversiblewheel made their first contribution in https://github.com/Instagram/LibCST/pull/1141 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.3.1...v1.4.0 + # 1.3.1 - 2024-04-03 ## Fixed diff --git a/native/Cargo.lock b/native/Cargo.lock index 77390083..e62007e4 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -355,7 +355,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" 
-version = "1.3.1" +version = "1.4.0" dependencies = [ "chic", "criterion", @@ -373,7 +373,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.3.1" +version = "1.4.0" dependencies = [ "quote", "syn 2.0.41", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index a0d92de3..2fd6ced5 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.3.1" +version = "1.4.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" memchr = "2.5.0" -libcst_derive = { path = "../libcst_derive", version = "1.3.1" } +libcst_derive = { path = "../libcst_derive", version = "1.4.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 127c8378..e2eee2da 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.3.1" +version = "1.4.0" edition = "2018" description = "Proc macro helpers for libcst." license-file = "LICENSE" From 0b4016c5b3801d7701cf0209a2cb25be5fd64a3f Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 26 May 2024 22:57:47 +0100 Subject: [PATCH 482/632] use trusted publishing for pypi (#1154) --- .github/workflows/pypi_upload.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 6526a002..ee9990cb 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -16,6 +16,8 @@ jobs: name: Upload wheels to pypi runs-on: ubuntu-latest needs: build + permissions: + id-token: write steps: - uses: actions/checkout@v4 with: @@ -46,14 +48,10 @@ jobs: if: github.event_name == 'push' uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ - password: ${{ secrets.TEST_PYPI_API_TOKEN }} repository-url: https://test.pypi.org/legacy/ packages-dir: ${{ steps.download.outputs.download-path }} - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} packages-dir: ${{ steps.download.outputs.download-path }} From 47ff8cbf222ca57c8dbd2139885576f58403029a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 20:45:18 +0100 Subject: [PATCH 483/632] Update maturin requirement from <1.6,>=0.8.3 to >=0.8.3,<1.7 (#1158) Updates the requirements on [maturin](https://github.com/pyo3/maturin) to permit the latest version. - [Release notes](https://github.com/pyo3/maturin/releases) - [Changelog](https://github.com/PyO3/maturin/blob/main/Changelog.md) - [Commits](https://github.com/pyo3/maturin/compare/v0.8.3...v1.6.0) --- updated-dependencies: - dependency-name: maturin dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 260c8655..a61f88b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dev = [ "hypothesis>=4.36.0", "hypothesmith>=0.0.4", "jupyter>=1.0.0", - "maturin>=0.8.3,<1.6", + "maturin>=0.8.3,<1.7", "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", "pyre-check==0.9.18; platform_system != 'Windows'", From 9f6e27600f04f09274b5c86640876600d7fa2d01 Mon Sep 17 00:00:00 2001 From: Camillo Date: Wed, 12 Jun 2024 02:36:50 -0700 Subject: [PATCH 484/632] FullyQualifiedNameProvider: Optionally consider pyproject.toml files when determining a file's module name and package (#1148) --- libcst/helpers/module.py | 20 ++++- libcst/helpers/tests/test_module.py | 75 ++++++++++++++++++- libcst/metadata/base_provider.py | 16 +++- libcst/metadata/file_path_provider.py | 4 +- libcst/metadata/full_repo_manager.py | 7 +- libcst/metadata/name_provider.py | 16 +++- .../metadata/tests/test_metadata_wrapper.py | 8 +- libcst/metadata/tests/test_name_provider.py | 2 +- libcst/metadata/type_inference_provider.py | 12 +-- 9 files changed, 140 insertions(+), 20 deletions(-) diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 3c26122d..37e6af08 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -5,7 +5,7 @@ # from dataclasses import dataclass from itertools import islice -from pathlib import PurePath +from pathlib import Path, PurePath from typing import List, Optional from libcst import Comment, EmptyLine, ImportFrom, Module @@ -132,11 +132,25 @@ class ModuleNameAndPackage: def calculate_module_and_package( - repo_root: StrPath, filename: StrPath + repo_root: StrPath, filename: StrPath, use_pyproject_toml: bool = False ) -> ModuleNameAndPackage: # Given an absolute repo_root and an absolute filename, calculate the # python module name for the file. - relative_filename = PurePath(filename).relative_to(repo_root) + if use_pyproject_toml: + # But also look for pyproject.toml files, indicating nested packages in the repo. + abs_repo_root = Path(repo_root).resolve() + abs_filename = Path(filename).resolve() + package_root = abs_filename.parent + while package_root != abs_repo_root: + if (package_root / "pyproject.toml").exists(): + break + if package_root == package_root.parent: + break + package_root = package_root.parent + + relative_filename = abs_filename.relative_to(package_root) + else: + relative_filename = PurePath(filename).relative_to(repo_root) relative_filename = relative_filename.with_suffix("") # handle special cases diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index 7260f5cc..815e1fa2 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -3,7 +3,9 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
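A short usage sketch for the new `use_pyproject_toml` flag (assumed layout: the repository root contains `foo/pyproject.toml`, mirroring the mocked tree in the tests that follow; the expected values are taken from those tests):

```
from libcst.helpers import calculate_module_and_package

# foo/pyproject.toml marks foo/ as a package root, so the module name is
# computed relative to foo/ instead of the repository root.
info = calculate_module_and_package(
    "/repo", "/repo/foo/foo/file.py", use_pyproject_toml=True
)
# Per the tests below: info.name == "foo.file", info.package == "foo"

# Default behaviour is unchanged and yields module name "foo.foo.file".
legacy = calculate_module_and_package("/repo", "/repo/foo/foo/file.py")
```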
# -from typing import Optional +from pathlib import Path, PurePath +from typing import Any, Optional +from unittest.mock import patch import libcst as cst from libcst.helpers.common import ensure_type @@ -251,6 +253,77 @@ class ModuleTest(UnitTest): calculate_module_and_package(repo_root, filename), module_and_package ) + @data_provider( + ( + ("foo/foo/__init__.py", ModuleNameAndPackage("foo", "foo")), + ("foo/foo/file.py", ModuleNameAndPackage("foo.file", "foo")), + ( + "foo/foo/sub/subfile.py", + ModuleNameAndPackage("foo.sub.subfile", "foo.sub"), + ), + ("libs/bar/bar/thing.py", ModuleNameAndPackage("bar.thing", "bar")), + ( + "noproj/some/file.py", + ModuleNameAndPackage("noproj.some.file", "noproj.some"), + ), + ) + ) + def test_calculate_module_and_package_using_pyproject_toml( + self, + rel_path: str, + module_and_package: Optional[ModuleNameAndPackage], + ) -> None: + mock_tree: dict[str, Any] = { + "home": { + "user": { + "root": { + "foo": { + "pyproject.toml": "content", + "foo": { + "__init__.py": "content", + "file.py": "content", + "sub": { + "subfile.py": "content", + }, + }, + }, + "libs": { + "bar": { + "pyproject.toml": "content", + "bar": { + "__init__.py": "content", + "thing.py": "content", + }, + } + }, + "noproj": { + "some": { + "file.py": "content", + } + }, + }, + }, + }, + } + repo_root = Path("/home/user/root").resolve() + fake_root: Path = repo_root.parent.parent.parent + + def mock_exists(path: PurePath) -> bool: + parts = path.relative_to(fake_root).parts + subtree = mock_tree + for part in parts: + if (subtree := subtree.get(part)) is None: + return False + return True + + with patch("pathlib.Path.exists", new=mock_exists): + self.assertEqual( + calculate_module_and_package( + repo_root, repo_root / rel_path, use_pyproject_toml=True + ), + module_and_package, + ) + @data_provider( ( # Providing a file outside the root should raise an exception diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index 1c113f57..2e03416f 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -6,12 +6,12 @@ from pathlib import Path from types import MappingProxyType from typing import ( - Callable, Generic, List, Mapping, MutableMapping, Optional, + Protocol, Type, TYPE_CHECKING, TypeVar, @@ -40,6 +40,18 @@ _ProvidedMetadataT = TypeVar("_ProvidedMetadataT", covariant=True) MaybeLazyMetadataT = Union[LazyValue[_ProvidedMetadataT], _ProvidedMetadataT] +class GenCacheMethod(Protocol): + def __call__( + self, + root_path: Path, + paths: List[str], + *, + timeout: Optional[int] = None, + use_pyproject_toml: bool = False, + ) -> Mapping[str, object]: + ... + + # We can't use an ABCMeta here, because of metaclass conflicts class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): """ @@ -59,7 +71,7 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): #: Implement gen_cache to indicate the metadata provider depends on cache from external #: system. This function will be called by :class:`~libcst.metadata.FullRepoManager` #: to compute required cache object per file path. 
- gen_cache: Optional[Callable[[Path, List[str], int], Mapping[str, object]]] = None + gen_cache: Optional[GenCacheMethod] = None def __init__(self, cache: object = None) -> None: super().__init__() diff --git a/libcst/metadata/file_path_provider.py b/libcst/metadata/file_path_provider.py index 5ed9baa6..6ab01b5f 100644 --- a/libcst/metadata/file_path_provider.py +++ b/libcst/metadata/file_path_provider.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. from pathlib import Path -from typing import List, Mapping, Optional +from typing import Any, List, Mapping, Optional import libcst as cst from libcst.metadata.base_provider import BatchableMetadataProvider @@ -41,7 +41,7 @@ class FilePathProvider(BatchableMetadataProvider[Path]): @classmethod def gen_cache( - cls, root_path: Path, paths: List[str], timeout: Optional[int] = None + cls, root_path: Path, paths: List[str], **kwargs: Any ) -> Mapping[str, Path]: cache = {path: (root_path / path).resolve() for path in paths} return cache diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index 83bb6e83..770ba1f6 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -22,6 +22,7 @@ class FullRepoManager: paths: Collection[str], providers: Collection["ProviderT"], timeout: int = 5, + use_pyproject_toml: bool = False, ) -> None: """ Given project root directory with pyre and watchman setup, :class:`~libcst.metadata.FullRepoManager` @@ -38,6 +39,7 @@ class FullRepoManager: self.root_path: Path = Path(repo_root_dir) self._cache: Dict["ProviderT", Mapping[str, object]] = {} self._timeout = timeout + self._use_pyproject_toml = use_pyproject_toml self._providers = providers self._paths: List[str] = list(paths) @@ -65,7 +67,10 @@ class FullRepoManager: handler = provider.gen_cache if handler: cache[provider] = handler( - self.root_path, self._paths, self._timeout + self.root_path, + self._paths, + timeout=self._timeout, + use_pyproject_toml=self._use_pyproject_toml, ) self._cache = cache diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 1868fa66..7de76eb5 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -5,7 +5,7 @@ import dataclasses from pathlib import Path -from typing import Collection, List, Mapping, Optional, Union +from typing import Any, Collection, List, Mapping, Optional, Union import libcst as cst from libcst._metadata_dependent import LazyValue, MetadataDependent @@ -112,9 +112,19 @@ class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedN @classmethod def gen_cache( - cls, root_path: Path, paths: List[str], timeout: Optional[int] = None + cls, + root_path: Path, + paths: List[str], + *, + use_pyproject_toml: bool = False, + **kwargs: Any, ) -> Mapping[str, ModuleNameAndPackage]: - cache = {path: calculate_module_and_package(root_path, path) for path in paths} + cache = { + path: calculate_module_and_package( + root_path, path, use_pyproject_toml=use_pyproject_toml + ) + for path in paths + } return cache def __init__(self, cache: ModuleNameAndPackage) -> None: diff --git a/libcst/metadata/tests/test_metadata_wrapper.py b/libcst/metadata/tests/test_metadata_wrapper.py index ee61e14f..9063a99a 100644 --- a/libcst/metadata/tests/test_metadata_wrapper.py +++ b/libcst/metadata/tests/test_metadata_wrapper.py @@ -48,9 +48,13 @@ class MetadataWrapperTest(UnitTest): self.assertNotEqual(hash(mw1), hash(mw3)) self.assertNotEqual(hash(mw2), hash(mw3)) 
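The `GenCacheMethod` protocol above also describes what a user-defined, cache-backed provider should look like. A rough sketch (the provider name and the metadata it computes are invented for illustration, not part of the patch):

```
from pathlib import Path
from typing import List, Mapping, Optional

import libcst as cst
from libcst.metadata import BatchableMetadataProvider


class FileSizeProvider(BatchableMetadataProvider[int]):
    """Toy provider that attaches the on-disk size of each file to its Module."""

    @classmethod
    def gen_cache(
        cls,
        root_path: Path,
        paths: List[str],
        *,
        timeout: Optional[int] = None,
        use_pyproject_toml: bool = False,
    ) -> Mapping[str, object]:
        # Called once per repo by FullRepoManager; each path's value is later
        # handed to the provider constructor as its cache for that file.
        return {path: (root_path / path).stat().st_size for path in paths}

    def __init__(self, cache: int) -> None:
        super().__init__(cache)

    def visit_Module(self, node: cst.Module) -> None:
        self.set_metadata(node, self.cache)
```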
+ @staticmethod + def ignore_args(*args: object, **kwargs: object) -> tuple[object, ...]: + return (args, kwargs) + def test_metadata_cache(self) -> None: class DummyMetadataProvider(BatchableMetadataProvider[None]): - gen_cache = tuple + gen_cache = self.ignore_args m = cst.parse_module("pass") mw = MetadataWrapper(m) @@ -60,7 +64,7 @@ class MetadataWrapperTest(UnitTest): mw.resolve(DummyMetadataProvider) class SimpleCacheMetadataProvider(BatchableMetadataProvider[object]): - gen_cache = tuple + gen_cache = self.ignore_args def __init__(self, cache: object) -> None: super().__init__(cache) diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index 80215dc6..fbd3631a 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -54,7 +54,7 @@ def get_fully_qualified_names(file_path: str, module_str: str) -> Set[QualifiedN cst.parse_module(dedent(module_str)), cache={ FullyQualifiedNameProvider: FullyQualifiedNameProvider.gen_cache( - Path(""), [file_path], None + Path(""), [file_path], timeout=None ).get(file_path, "") }, ) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index c9c1fc9a..f00c97b6 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -6,7 +6,7 @@ import json import subprocess from pathlib import Path -from typing import Dict, List, Mapping, Optional, Sequence, Tuple, TypedDict +from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, TypedDict import libcst as cst from libcst._position import CodePosition, CodeRange @@ -50,11 +50,13 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): METADATA_DEPENDENCIES = (PositionProvider,) - @staticmethod - # pyre-fixme[40]: Static method `gen_cache` cannot override a non-static method - # defined in `cst.metadata.base_provider.BaseMetadataProvider`. + @classmethod def gen_cache( - root_path: Path, paths: List[str], timeout: Optional[int] + cls, + root_path: Path, + paths: List[str], + timeout: Optional[int] = None, + **kwargs: Any, ) -> Mapping[str, object]: params = ",".join(f"path='{root_path / path}'" for path in paths) cmd_args = ["pyre", "--noninteractive", "query", f"types({params})"] From 8b97600fb3db89ae475de8b00736829eadd917c9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 12 Jun 2024 17:29:25 +0100 Subject: [PATCH 485/632] fix various Match statement visitation errors (#1161) Fixes #1160. This PR also - fixes `whitespace_before_colon` being swallowed during visitation on `MatchCase`s - adds a new type of roundtrip test that catches issues of this class: the test applies a noop transformer to exercise the visitation API and compares the result with the original source. - adds a few more cases to the match fixture --- libcst/_nodes/statement.py | 12 +++++----- libcst/tests/test_roundtrip.py | 24 ++++++++++++++++--- .../libcst/tests/fixtures/malicious_match.py | 2 ++ 3 files changed, 29 insertions(+), 9 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 6cad7684..1cb9221f 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -2854,17 +2854,16 @@ class MatchCase(CSTNode): self, "whitespace_after_case", self.whitespace_after_case, visitor ), pattern=visit_required(self, "pattern", self.pattern, visitor), - # pyre-fixme[6]: Expected `SimpleWhitespace` for 4th param but got - # `Optional[SimpleWhitespace]`. 
- whitespace_before_if=visit_optional( + whitespace_before_if=visit_required( self, "whitespace_before_if", self.whitespace_before_if, visitor ), - # pyre-fixme[6]: Expected `SimpleWhitespace` for 5th param but got - # `Optional[SimpleWhitespace]`. - whitespace_after_if=visit_optional( + whitespace_after_if=visit_required( self, "whitespace_after_if", self.whitespace_after_if, visitor ), guard=visit_optional(self, "guard", self.guard, visitor), + whitespace_before_colon=visit_required( + self, "whitespace_before_colon", self.whitespace_before_colon, visitor + ), body=visit_required(self, "body", self.body, visitor), ) @@ -3382,6 +3381,7 @@ class MatchClass(MatchPattern): whitespace_after_kwds=visit_required( self, "whitespace_after_kwds", self.whitespace_after_kwds, visitor ), + rpar=visit_sequence(self, "rpar", self.rpar, visitor), ) def _codegen_impl(self, state: CodegenState) -> None: diff --git a/libcst/tests/test_roundtrip.py b/libcst/tests/test_roundtrip.py index e3a7a35b..d5da81f2 100644 --- a/libcst/tests/test_roundtrip.py +++ b/libcst/tests/test_roundtrip.py @@ -3,25 +3,43 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. + from pathlib import Path from unittest import TestCase -from libcst import parse_module +from libcst import CSTTransformer, parse_module from libcst._parser.entrypoints import is_native fixtures: Path = Path(__file__).parent.parent.parent / "native/libcst/tests/fixtures" +class NOOPTransformer(CSTTransformer): + pass + + class RoundTripTests(TestCase): - def test_clean_roundtrip(self) -> None: + def _get_fixtures(self) -> list[Path]: if not is_native(): self.skipTest("pure python parser doesn't work with this") self.assertTrue(fixtures.exists(), f"{fixtures} should exist") files = list(fixtures.iterdir()) self.assertGreater(len(files), 0) - for file in files: + return files + + def test_clean_roundtrip(self) -> None: + for file in self._get_fixtures(): with self.subTest(file=str(file)): src = file.read_text(encoding="utf-8") mod = parse_module(src) self.maxDiff = None self.assertEqual(mod.code, src) + + def test_transform_roundtrip(self) -> None: + transformer = NOOPTransformer() + self.maxDiff = None + for file in self._get_fixtures(): + with self.subTest(file=str(file)): + src = file.read_text(encoding="utf-8") + mod = parse_module(src) + new_mod = mod.visit(transformer) + self.assertEqual(src, new_mod.code) diff --git a/native/libcst/tests/fixtures/malicious_match.py b/native/libcst/tests/fixtures/malicious_match.py index 8c46571f..54840022 100644 --- a/native/libcst/tests/fixtures/malicious_match.py +++ b/native/libcst/tests/fixtures/malicious_match.py @@ -37,4 +37,6 @@ match x: case x,y , * more :pass case y.z: pass case 1, 2: pass + case ( Foo ( ) ) : pass + case (lol) if ( True , ) :pass From 7bb00179d9d8a5d66d854e7c1e474ec546a0db86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Jun 2024 19:36:33 +0100 Subject: [PATCH 486/632] Bump pypa/cibuildwheel from 2.18.0 to 2.18.1 (#1155) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.18.0 to 2.18.1. 
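The round-trip property exercised by the `NOOPTransformer` test added above can be reproduced directly; a sketch, assuming the installed LibCST already contains the `MatchCase`/`MatchClass` visitation fixes (otherwise the assertion fails):

```
import libcst as cst


class NoopTransformer(cst.CSTTransformer):
    pass


src = "match x:\n    case ( Foo ( ) ) : pass\n"
module = cst.parse_module(src)
# A no-op visit must reproduce the source exactly, including the extra
# parentheses and the whitespace before the colon that visitation used to swallow.
assert module.visit(NoopTransformer()).code == src
```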
- [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.18.0...v2.18.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b60e6b9a..693d4d97 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.18.0 + uses: pypa/cibuildwheel@v2.18.1 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 72701e4b40279c729accdf6d93bb03143337dae2 Mon Sep 17 00:00:00 2001 From: Kirill Ignatev Date: Thu, 4 Jul 2024 02:49:15 -0400 Subject: [PATCH 487/632] Mention codemod -x flag in docs (#1169) --- docs/source/codemods_tutorial.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/source/codemods_tutorial.rst b/docs/source/codemods_tutorial.rst index 78365ca9..8fc71bea 100644 --- a/docs/source/codemods_tutorial.rst +++ b/docs/source/codemods_tutorial.rst @@ -135,16 +135,18 @@ replaces any string which matches our string command-line argument with a consta It also takes care of adding the import required for the constant to be defined properly. Cool! Let's look at the command-line help for this codemod. Let's assume you saved it -as ``constant_folding.py`` inside ``libcst.codemod.commands``. You can get help for the +as ``constant_folding.py``. You can get help for the codemod by running the following command:: - python3 -m libcst.tool codemod constant_folding.ConvertConstantCommand --help + python3 -m libcst.tool codemod -x constant_folding.ConvertConstantCommand --help Notice that along with the default arguments, the ``--string`` and ``--constant`` arguments are present in the help, and the command-line description has been updated with the codemod's description string. You'll notice that the codemod also shows up on ``libcst.tool list``. +And ``-x`` flag allows to load any module as a codemod in addition to the standard ones. + ---------------- Testing Codemods ---------------- From e20e757159f435417fb4d3a0913264c7d252b847 Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Sat, 20 Jul 2024 01:04:25 -0700 Subject: [PATCH 488/632] Remove uses of `# pyre-placeholder-stub` (#1174) --- libcst/tests/test_fuzz.py | 10 ++++++++++ stubs/hypothesis.pyi | 6 +++++- stubs/hypothesmith.pyi | 6 +++++- stubs/setuptools.pyi | 6 +++++- stubs/typing_inspect.pyi | 6 +++++- 5 files changed, 30 insertions(+), 4 deletions(-) diff --git a/libcst/tests/test_fuzz.py b/libcst/tests/test_fuzz.py index 66b32276..6ec95136 100644 --- a/libcst/tests/test_fuzz.py +++ b/libcst/tests/test_fuzz.py @@ -50,6 +50,9 @@ class FuzzTest(unittest.TestCase): @unittest.skipUnless( bool(os.environ.get("HYPOTHESIS", False)), "Hypothesis not requested" ) + # pyre-fixme[56]: Pyre was not able to infer the type of the decorator + # `hypothesis.given($parameter$source_code = + # hypothesmith.from_grammar($parameter$start = "file_input"))`. 
@hypothesis.given(source_code=from_grammar(start="file_input")) def test_parsing_compilable_module_strings(self, source_code: str) -> None: """The `from_grammar()` strategy generates strings from Python's grammar. @@ -77,6 +80,9 @@ class FuzzTest(unittest.TestCase): @unittest.skipUnless( bool(os.environ.get("HYPOTHESIS", False)), "Hypothesis not requested" ) + # pyre-fixme[56]: Pyre was not able to infer the type of the decorator + # `hypothesis.given($parameter$source_code = + # hypothesmith.from_grammar($parameter$start = "eval_input").map(str.strip))`. @hypothesis.given(source_code=from_grammar(start="eval_input").map(str.strip)) def test_parsing_compilable_expression_strings(self, source_code: str) -> None: """Much like statements, but for expressions this time. @@ -105,6 +111,10 @@ class FuzzTest(unittest.TestCase): @unittest.skipUnless( bool(os.environ.get("HYPOTHESIS", False)), "Hypothesis not requested" ) + # pyre-fixme[56]: Pyre was not able to infer the type of the decorator + # `hypothesis.given($parameter$source_code = + # hypothesmith.from_grammar($parameter$start = "single_input").map(lambda + # ($parameter$s) (s.replace(" @hypothesis.given( source_code=from_grammar(start="single_input").map( lambda s: s.replace("\n", "") + "\n" diff --git a/stubs/hypothesis.pyi b/stubs/hypothesis.pyi index 0568b4d1..a8c27975 100644 --- a/stubs/hypothesis.pyi +++ b/stubs/hypothesis.pyi @@ -1 +1,5 @@ -# pyre-placeholder-stub +# pyre-unsafe + +from typing import Any + +def __getattr__(name: str) -> Any: ... diff --git a/stubs/hypothesmith.pyi b/stubs/hypothesmith.pyi index 0568b4d1..a8c27975 100644 --- a/stubs/hypothesmith.pyi +++ b/stubs/hypothesmith.pyi @@ -1 +1,5 @@ -# pyre-placeholder-stub +# pyre-unsafe + +from typing import Any + +def __getattr__(name: str) -> Any: ... diff --git a/stubs/setuptools.pyi b/stubs/setuptools.pyi index 0568b4d1..a8c27975 100644 --- a/stubs/setuptools.pyi +++ b/stubs/setuptools.pyi @@ -1 +1,5 @@ -# pyre-placeholder-stub +# pyre-unsafe + +from typing import Any + +def __getattr__(name: str) -> Any: ... diff --git a/stubs/typing_inspect.pyi b/stubs/typing_inspect.pyi index 0568b4d1..a8c27975 100644 --- a/stubs/typing_inspect.pyi +++ b/stubs/typing_inspect.pyi @@ -1 +1,5 @@ -# pyre-placeholder-stub +# pyre-unsafe + +from typing import Any + +def __getattr__(name: str) -> Any: ... From b0d145dddd32b312b90c1e8550ce68f35643a58a Mon Sep 17 00:00:00 2001 From: Kirill Ignatev Date: Tue, 30 Jul 2024 04:01:07 -0400 Subject: [PATCH 489/632] Add validation for If node (#1177) * Add validation for If node Don't allow no space no parentheses. --- libcst/_nodes/statement.py | 7 ++++++- libcst/_nodes/tests/test_if.py | 20 +++++++++++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 1cb9221f..e02ae03c 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -599,7 +599,12 @@ class If(BaseCompoundStatement): #: The whitespace appearing after the test expression but before the colon. 
whitespace_after_test: SimpleWhitespace = SimpleWhitespace.field("") - # TODO: _validate + def _validate(self) -> None: + if ( + self.whitespace_before_test.empty + and not self.test._safe_to_use_with_word_operator(ExpressionPosition.RIGHT) + ): + raise CSTValidationError("Must have at least one space after 'if' keyword.") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "If": return If( diff --git a/libcst/_nodes/tests/test_if.py b/libcst/_nodes/tests/test_if.py index e6e4c5ae..7615614e 100644 --- a/libcst/_nodes/tests/test_if.py +++ b/libcst/_nodes/tests/test_if.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any +from typing import Any, Callable import libcst as cst from libcst import parse_statement @@ -129,3 +129,21 @@ class IfTest(CSTNodeTest): ) def test_valid(self, **kwargs: Any) -> None: self.validate_node(**kwargs) + + @data_provider( + ( + # Validate whitespace handling + ( + lambda: cst.If( + cst.Name("conditional"), + cst.SimpleStatementSuite((cst.Pass(),)), + whitespace_before_test=cst.SimpleWhitespace(""), + ), + "Must have at least one space after 'if' keyword.", + ), + ) + ) + def test_invalid( + self, get_node: Callable[[], cst.CSTNode], expected_re: str + ) -> None: + self.assert_invalid(get_node, expected_re) From fb9e47585b7c7f2cc34355a7a87e537f9f0ed1c0 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 31 Jul 2024 12:13:05 +0100 Subject: [PATCH 490/632] make libcst_native::tokenizer public (#1182) --- native/libcst/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index 9bd6bb99..03a97173 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -5,7 +5,7 @@ use std::cmp::{max, min}; -mod tokenizer; +pub mod tokenizer; pub use tokenizer::whitespace_parser::Config; use tokenizer::{whitespace_parser, TokConfig, Token, TokenIterator}; From 814f243a75fbceeac4c461d6c3ea577de0d9e4b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:13:31 +0100 Subject: [PATCH 491/632] Bump pypa/cibuildwheel from 2.18.1 to 2.19.2 (#1171) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.18.1 to 2.19.2. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.18.1...v2.19.2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
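A sketch of what the new `If` validation above accepts and rejects when nodes are built by hand (the rendered snippets in the comments are inferred from the codegen rules, not output shown in the patch):

```
import libcst as cst

body = cst.SimpleStatementSuite((cst.Pass(),))

try:
    # Rejected: with no space and no parentheses this would render as "ifx: pass".
    cst.If(cst.Name("x"), body, whitespace_before_test=cst.SimpleWhitespace(""))
except cst.CSTValidationError:
    pass

# Still accepted: a parenthesized test needs no space and renders as "if(x): pass".
cst.If(
    cst.Name("x", lpar=[cst.LeftParen()], rpar=[cst.RightParen()]),
    body,
    whitespace_before_test=cst.SimpleWhitespace(""),
)
```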
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 693d4d97..237e1fb8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.18.1 + uses: pypa/cibuildwheel@v2.19.2 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 56cd1f98628188d0850c33c96dabd3acccc6645a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:14:07 +0100 Subject: [PATCH 492/632] Update maturin requirement from <1.7,>=0.8.3 to >=1.7.0,<1.8 (#1170) Updates the requirements on [maturin](https://github.com/pyo3/maturin) to permit the latest version. - [Release notes](https://github.com/pyo3/maturin/releases) - [Changelog](https://github.com/PyO3/maturin/blob/main/Changelog.md) - [Commits](https://github.com/pyo3/maturin/compare/v0.8.3...v1.7.0) --- updated-dependencies: - dependency-name: maturin dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a61f88b1..0ea330e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dev = [ "hypothesis>=4.36.0", "hypothesmith>=0.0.4", "jupyter>=1.0.0", - "maturin>=0.8.3,<1.7", + "maturin>=1.7.0,<1.8", "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", "pyre-check==0.9.18; platform_system != 'Windows'", From 45234f198c22aed1227c347d2fcc29c2f003d386 Mon Sep 17 00:00:00 2001 From: Kirill Ignatev Date: Mon, 5 Aug 2024 17:41:51 -0400 Subject: [PATCH 493/632] Clear warnings for each file in comemod cli (#1184) * Clean warnings for each file in comemod cli * Fix ZeroDivisionError: float division by zero When codemodding too fast * Recreate CodemodContext for each file Keep only context.metadata_manager Remove wrapper from context defaults on each file --- libcst/codemod/_cli.py | 31 ++++++++++++------------ libcst/codemod/tests/test_codemod_cli.py | 30 +++++++++++++++++++++++ 2 files changed, 45 insertions(+), 16 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 8bfc11f8..2481bf9d 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -22,6 +22,7 @@ from typing import Any, AnyStr, cast, Dict, List, Optional, Sequence, Union from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod +from libcst.codemod._context import CodemodContext from libcst.codemod._dummy_pool import DummyPool from libcst.codemod._runner import ( SkipFile, @@ -246,30 +247,28 @@ def _execute_transform( # noqa: C901 ), ) - # Somewhat gross hack to provide the filename in the transform's context. - # We do this after the fork so that a context that was initialized with - # some defaults before calling parallel_exec_transform_with_prettyprint - # will be updated per-file. 
- transformer.context = replace( - transformer.context, - filename=filename, - scratch=deepcopy(scratch), - ) - # determine the module and package name for this file try: module_name_and_package = calculate_module_and_package( config.repo_root or ".", filename ) - transformer.context = replace( - transformer.context, - full_module_name=module_name_and_package.name, - full_package_name=module_name_and_package.package, - ) + mod_name = module_name_and_package.name + pkg_name = module_name_and_package.package except ValueError as ex: print( f"Failed to determine module name for {filename}: {ex}", file=sys.stderr ) + mod_name = None + pkg_name = None + + # Apart from metadata_manager, every field of context should be reset per file + transformer.context = CodemodContext( + scratch=deepcopy(scratch), + filename=filename, + full_module_name=mod_name, + full_package_name=pkg_name, + metadata_manager=transformer.context.metadata_manager, + ) # Run the transform, bail if we failed or if we aren't formatting code try: @@ -420,7 +419,7 @@ class Progress: operations still to do. """ - if files_finished <= 0: + if files_finished <= 0 or elapsed_seconds == 0: # Technically infinite but calculating sounds better. return "[calculating]" diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 0309c74a..0fa3dbef 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -8,10 +8,12 @@ import platform import subprocess import sys +import tempfile from pathlib import Path from unittest import skipIf from libcst._parser.entrypoints import is_native +from libcst.codemod import CodemodTest from libcst.testing.utils import UnitTest @@ -63,3 +65,31 @@ class TestCodemodCLI(UnitTest): stderr=subprocess.STDOUT, ) assert "Finished codemodding 1 files!" in output + + def test_warning_messages_several_files(self) -> None: + code = """ + def baz() -> str: + return "{}: {}".format(*baz) + """ + with tempfile.TemporaryDirectory() as tmpdir: + p = Path(tmpdir) + (p / "mod1.py").write_text(CodemodTest.make_fixture_data(code)) + (p / "mod2.py").write_text(CodemodTest.make_fixture_data(code)) + (p / "mod3.py").write_text(CodemodTest.make_fixture_data(code)) + output = subprocess.run( + [ + sys.executable, + "-m", + "libcst.tool", + "codemod", + "convert_format_to_fstring.ConvertFormatStringCommand", + str(p), + ], + encoding="utf-8", + stderr=subprocess.PIPE, + ) + # Each module will generate a warning, so we should get 3 warnings in total + self.assertIn( + "- 3 warnings were generated.", + output.stderr, + ) From 5f5fd386b07c71cf18534a0cf0c8d6f3368a4fc7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 Aug 2024 09:26:31 +0100 Subject: [PATCH 494/632] Bump pypa/cibuildwheel from 2.19.2 to 2.20.0 (#1185) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.19.2 to 2.20.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.19.2...v2.20.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
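In practice, the per-file context reset above means a codemod can no longer rely on anything accumulating on its context between files apart from the metadata manager and the initial scratch. A sketch with invented names:

```
from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand


class MyCommand(VisitorBasedCodemodCommand):
    pass


command = MyCommand(CodemodContext(scratch={"SHARED": True}))
# While the CLI codemods several files, command.context is rebuilt for each one:
# warnings, filename and module/package names start fresh per file, and only
# metadata_manager plus a deep copy of the initial scratch carry over.
```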
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 237e1fb8..3a57330a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.19.2 + uses: pypa/cibuildwheel@v2.20.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 52a59471c9a2869af2969f47c226fdfa17b25536 Mon Sep 17 00:00:00 2001 From: Michel Lind Date: Tue, 13 Aug 2024 02:02:12 -0400 Subject: [PATCH 495/632] Use `license` instead of `license-file` (#1189) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Per the Cargo Book, `license-file` is only to be used if a package uses a non-standard license; see https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields Declare the licenses directly, and verify that the LICENSE file containing the license breakdown is still included ``` …n LibCST/native/libcst_derive on  cargo-fixes [!] is 📦 v1.4.0 via 🦀 v1.77.1 ⬢ [fedora:40] ❯ cargo package --list --allow-dirty | grep LICENSE LICENSE …n LibCST/native/libcst_derive on  cargo-fixes [!] is 📦 v1.4.0 via 🦀 v1.77.1 ⬢ [fedora:40] ❯ cd ../libcst michel in LibCST/native/libcst on  cargo-fixes [!] is 📦 v1.4.0 via 🦀 v1.77.1 ⬢ [fedora:40] ❯ cargo package --list --allow-dirty | grep LICENSE LICENSE src/tokenizer/core/LICENSE ``` Signed-off-by: Michel Lind --- native/libcst/Cargo.toml | 2 +- native/libcst_derive/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 2fd6ced5..81f3bd88 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -10,7 +10,7 @@ authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" description = "A Python parser and Concrete Syntax Tree library." -license-file = "LICENSE" +license = "MIT AND (MIT AND PSF-2.0)" repository = "https://github.com/Instagram/LibCST" documentation = "https://libcst.rtfd.org" keywords = ["python", "cst", "ast"] diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index e2eee2da..47d7ca85 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -3,7 +3,7 @@ name = "libcst_derive" version = "1.4.0" edition = "2018" description = "Proc macro helpers for libcst." 
-license-file = "LICENSE" +license = "MIT" repository = "https://github.com/Instagram/LibCST" documentation = "https://libcst.rtfd.org" keywords = ["macros", "python"] From a4203e5c4904360d2fdd437ffb8c2f28aa161ab5 Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Thu, 22 Aug 2024 16:06:01 -0700 Subject: [PATCH 496/632] Drop codecov from CI and readme (#1192) * Drop codecov from CI and readme * Remove upload job, move coverage check to test job with hatch --- .github/workflows/ci.yml | 38 ++++++-------------------------------- README.rst | 6 +----- codecov.yml | 4 ---- pyproject.toml | 10 ++++++++-- 4 files changed, 15 insertions(+), 43 deletions(-) delete mode 100644 codecov.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce61c654..5c43745c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,12 +31,17 @@ jobs: toolchain: stable - name: Build LibCST run: hatch -vv env create - - name: Tests + - name: Native Parser Tests run: hatch run test - name: Pure Parser Tests env: + COVERAGE_FILE: .coverage.pure LIBCST_PARSER_TYPE: pure run: hatch run test + - name: Coverage + run: | + hatch run coverage combine .coverage.pure + hatch run coverage report # Run linters lint: @@ -71,37 +76,6 @@ jobs: run: pip install -U hatch - run: hatch run typecheck - # Upload test coverage - coverage: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - with: - cache: pip - cache-dependency-path: "pyproject.toml" - python-version: "3.10" - - name: Install hatch - run: pip install -U hatch - - name: Generate Coverage - run: | - hatch run coverage run setup.py test - hatch run coverage xml -i - - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - files: coverage.xml - fail_ci_if_error: true - verbose: true - - name: Archive Coverage - uses: actions/upload-artifact@v4 - with: - name: coverage - path: coverage.xml - # Build the docs docs: runs-on: ubuntu-latest diff --git a/README.rst b/README.rst index 3b3d39ed..90e2f278 100644 --- a/README.rst +++ b/README.rst @@ -4,7 +4,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python -|support-ukraine| |readthedocs-badge| |ci-badge| |codecov-badge| |pypi-badge| |pypi-download| |notebook-badge| +|support-ukraine| |readthedocs-badge| |ci-badge| |pypi-badge| |pypi-download| |notebook-badge| .. |support-ukraine| image:: https://img.shields.io/badge/Support-Ukraine-FFD500?style=flat&labelColor=005BBB :alt: Support Ukraine - Help Provide Humanitarian Aid to Ukraine. @@ -18,10 +18,6 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python :target: https://github.com/Instagram/LibCST/actions/workflows/build.yml?query=branch%3Amain :alt: Github Actions -.. |codecov-badge| image:: https://codecov.io/gh/Instagram/LibCST/branch/main/graph/badge.svg - :target: https://codecov.io/gh/Instagram/LibCST/branch/main - :alt: CodeCov - .. 
|pypi-badge| image:: https://img.shields.io/pypi/v/libcst.svg :target: https://pypi.org/project/libcst :alt: PYPI diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index 32f5ee69..00000000 --- a/codecov.yml +++ /dev/null @@ -1,4 +0,0 @@ -coverage: - status: - project: no - patch: yes diff --git a/pyproject.toml b/pyproject.toml index 0ea330e9..f4670704 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ dependencies = ["pyyaml>=5.2"] [project.optional-dependencies] dev = [ "black==23.12.1", - "coverage>=4.5.4", + "coverage[toml]>=4.5.4", "build>=0.10.0", "fixit==2.1.0", "flake8==7.0.0", @@ -51,6 +51,12 @@ Changelog = "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md" target-version = ["py39"] extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format +[tool.coverage.report] +fail_under = 93 +precision = 1 +show_missing = true +skip_covered = true + [tool.hatch.envs.default] features = ["dev"] @@ -64,7 +70,7 @@ lint = [ "python -m slotscheck libcst", "python scripts/check_copyright.py", ] -test = ["python --version", "python -m libcst.tests"] +test = ["python --version", "python -m coverage run -m libcst.tests"] typecheck = ["pyre --version", "pyre check"] [tool.slotscheck] From be025613f968e00f41ac1e73d4169762c5065c73 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 16:29:16 -0700 Subject: [PATCH 497/632] Bump ufmt from 2.6.0 to 2.7.0 (#1163) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.6.0 to 2.7.0. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.6.0...v2.7.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f4670704..1e825ae1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ dev = [ "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.6.0", + "ufmt==2.7.0", "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From cdf9ef414f2c0a1444cc85c10e7173d8ee2f04a9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 16:29:31 -0700 Subject: [PATCH 498/632] Bump flake8 from 7.0.0 to 7.1.1 (#1187) Bumps [flake8](https://github.com/pycqa/flake8) from 7.0.0 to 7.1.1. - [Commits](https://github.com/pycqa/flake8/compare/7.0.0...7.1.1) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 1e825ae1..16123a81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ dev = [ "coverage[toml]>=4.5.4", "build>=0.10.0", "fixit==2.1.0", - "flake8==7.0.0", + "flake8==7.1.1", "Sphinx>=5.1.1", "hypothesis>=4.36.0", "hypothesmith>=0.0.4", From bf5fb4132e04ac87e1f72c86e37c62dc6594dc9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 16:46:01 -0700 Subject: [PATCH 499/632] Bump black from 23.12.1 to 24.8.0 (#1186) * Bump black from 23.12.1 to 24.8.0 Bumps [black](https://github.com/psf/black) from 23.12.1 to 24.8.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.12.1...24.8.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Update formatting --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Amethyst Reese --- libcst/_nodes/base.py | 3 +- libcst/_nodes/expression.py | 27 +++++++------ libcst/_nodes/op.py | 9 ++--- libcst/_nodes/statement.py | 39 +++++++++---------- libcst/_parser/base_parser.py | 6 +-- libcst/_parser/parso/pgen2/generator.py | 6 +-- libcst/_parser/types/config.py | 6 +-- libcst/_position.py | 6 +-- libcst/_typed_visitor.py | 3 ++ .../convert_namedtuple_to_dataclass.py | 4 +- .../convert_percent_format_to_fstring.py | 8 ++-- libcst/codemod/commands/rename.py | 12 +++--- libcst/metadata/base_provider.py | 3 +- libcst/metadata/scope_provider.py | 28 ++++++------- libcst/metadata/tests/test_scope_provider.py | 16 +++++--- libcst/tool.py | 3 +- pyproject.toml | 2 +- 17 files changed, 90 insertions(+), 91 deletions(-) diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index d9689f8f..d043cb64 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -292,8 +292,7 @@ class CSTNode(ABC): return False @abstractmethod - def _codegen_impl(self, state: CodegenState) -> None: - ... + def _codegen_impl(self, state: CodegenState) -> None: ... def _codegen(self, state: CodegenState, **kwargs: Any) -> None: state.before_codegen(self) diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 75f7da13..a8f2ac3e 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -1647,9 +1647,9 @@ class Annotation(CSTNode): #: colon or arrow. annotation: BaseExpression - whitespace_before_indicator: Union[ - BaseParenthesizableWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_before_indicator: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) whitespace_after_indicator: BaseParenthesizableWhitespace = SimpleWhitespace.field( " " ) @@ -2101,9 +2101,9 @@ class Lambda(BaseExpression): rpar: Sequence[RightParen] = () #: Whitespace after the lambda keyword, but before any argument or the colon. 
- whitespace_after_lambda: Union[ - BaseParenthesizableWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_lambda: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: if position == ExpressionPosition.LEFT: @@ -2601,9 +2601,9 @@ class From(CSTNode): item: BaseExpression #: The whitespace at the very start of this node. - whitespace_before_from: Union[ - BaseParenthesizableWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_before_from: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: The whitespace after the ``from`` keyword, but before the ``item``. whitespace_after_from: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") @@ -2662,9 +2662,9 @@ class Yield(BaseExpression): rpar: Sequence[RightParen] = () #: Whitespace after the ``yield`` keyword, but before the ``value``. - whitespace_after_yield: Union[ - BaseParenthesizableWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_yield: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) def _validate(self) -> None: # Paren rules and such @@ -2748,8 +2748,7 @@ class _BaseElementImpl(CSTNode, ABC): state: CodegenState, default_comma: bool = False, default_comma_whitespace: bool = False, # False for a single-item collection - ) -> None: - ... + ) -> None: ... class BaseElement(_BaseElementImpl, ABC): diff --git a/libcst/_nodes/op.py b/libcst/_nodes/op.py index e19d24d3..1765f536 100644 --- a/libcst/_nodes/op.py +++ b/libcst/_nodes/op.py @@ -43,8 +43,7 @@ class _BaseOneTokenOp(CSTNode, ABC): self.whitespace_after._codegen(state) @abstractmethod - def _get_token(self) -> str: - ... + def _get_token(self) -> str: ... class _BaseTwoTokenOp(CSTNode, ABC): @@ -88,8 +87,7 @@ class _BaseTwoTokenOp(CSTNode, ABC): self.whitespace_after._codegen(state) @abstractmethod - def _get_tokens(self) -> Tuple[str, str]: - ... + def _get_tokens(self) -> Tuple[str, str]: ... class BaseUnaryOp(CSTNode, ABC): @@ -115,8 +113,7 @@ class BaseUnaryOp(CSTNode, ABC): self.whitespace_after._codegen(state) @abstractmethod - def _get_token(self) -> str: - ... + def _get_token(self) -> str: ... class BaseBooleanOp(_BaseOneTokenOp, ABC): diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index e02ae03c..e6d6915f 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -113,8 +113,7 @@ class BaseSmallStatement(CSTNode, ABC): @abstractmethod def _codegen_impl( self, state: CodegenState, default_semicolon: bool = False - ) -> None: - ... + ) -> None: ... @add_slots @@ -273,9 +272,9 @@ class Return(BaseSmallStatement): #: Optional whitespace after the ``return`` keyword before the optional #: value expression. - whitespace_after_return: Union[ - SimpleWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_return: Union[SimpleWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: Optional semicolon when this is used in a statement line. This semicolon #: owns the whitespace on both sides of it when it is used. @@ -2403,9 +2402,9 @@ class Raise(BaseSmallStatement): cause: Optional[From] = None #: Any whitespace appearing between the ``raise`` keyword and the exception. 
- whitespace_after_raise: Union[ - SimpleWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_raise: Union[SimpleWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: Optional semicolon when this is used in a statement line. This semicolon #: owns the whitespace on both sides of it when it is used. @@ -3423,15 +3422,15 @@ class MatchAs(MatchPattern): #: Whitespace between ``pattern`` and the ``as`` keyword (if ``pattern`` is not #: ``None``) - whitespace_before_as: Union[ - BaseParenthesizableWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_before_as: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: Whitespace between the ``as`` keyword and ``name`` (if ``pattern`` is not #: ``None``) - whitespace_after_as: Union[ - BaseParenthesizableWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_as: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: Parenthesis at the beginning of the node lpar: Sequence[LeftParen] = () @@ -3774,16 +3773,16 @@ class TypeAlias(BaseSmallStatement): #: Whitespace between the name and the type parameters (if they exist) or the ``=``. #: If not specified, :class:`MaybeSentinel` will be replaced with a single space if #: there are no type parameters, otherwise no spaces. - whitespace_after_name: Union[ - SimpleWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_name: Union[SimpleWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: Whitespace between the type parameters and the ``=``. Always empty if there are #: no type parameters. If not specified, :class:`MaybeSentinel` will be replaced #: with a single space if there are type parameters. - whitespace_after_type_parameters: Union[ - SimpleWhitespace, MaybeSentinel - ] = MaybeSentinel.DEFAULT + whitespace_after_type_parameters: Union[SimpleWhitespace, MaybeSentinel] = ( + MaybeSentinel.DEFAULT + ) #: Whitespace between the ``=`` and the value. whitespace_after_equals: SimpleWhitespace = SimpleWhitespace.field(" ") diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index ef9e1519..6ab97ab8 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -129,11 +129,9 @@ class BaseParser(Generic[_TokenT, _TokenTypeT, _NodeT]): def convert_nonterminal( self, nonterminal: str, children: Sequence[_NodeT] - ) -> _NodeT: - ... + ) -> _NodeT: ... - def convert_terminal(self, token: _TokenT) -> _NodeT: - ... + def convert_terminal(self, token: _TokenT) -> _NodeT: ... def _add_token(self, token: _TokenT) -> None: """ diff --git a/libcst/_parser/parso/pgen2/generator.py b/libcst/_parser/parso/pgen2/generator.py index 4e20e89d..ae889f33 100644 --- a/libcst/_parser/parso/pgen2/generator.py +++ b/libcst/_parser/parso/pgen2/generator.py @@ -72,9 +72,9 @@ class DFAState(Generic[_TokenTypeT]): def __init__(self, from_rule: str, nfa_set: Set[NFAState], final: NFAState) -> None: self.from_rule = from_rule self.nfa_set = nfa_set - self.arcs: Mapping[ - str, DFAState - ] = {} # map from terminals/nonterminals to DFAState + self.arcs: Mapping[str, DFAState] = ( + {} + ) # map from terminals/nonterminals to DFAState # In an intermediary step we set these nonterminal arcs (which has the # same structure as arcs). These don't contain terminals anymore. 
self.nonterminal_arcs: Mapping[str, DFAState] = {} diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index 1fc32371..289fd8ae 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -27,9 +27,9 @@ except ImportError: BaseWhitespaceParserConfig = config_mod.BaseWhitespaceParserConfig ParserConfig = config_mod.ParserConfig -parser_config_asdict: Callable[ - [ParserConfig], Mapping[str, Any] -] = config_mod.parser_config_asdict +parser_config_asdict: Callable[[ParserConfig], Mapping[str, Any]] = ( + config_mod.parser_config_asdict +) class AutoConfig(Enum): diff --git a/libcst/_position.py b/libcst/_position.py index d7ba0d07..e81e9ab4 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -40,12 +40,10 @@ class CodeRange: end: CodePosition @overload - def __init__(self, start: CodePosition, end: CodePosition) -> None: - ... + def __init__(self, start: CodePosition, end: CodePosition) -> None: ... @overload - def __init__(self, start: Tuple[int, int], end: Tuple[int, int]) -> None: - ... + def __init__(self, start: Tuple[int, int], end: Tuple[int, int]) -> None: ... def __init__(self, start: _CodePositionT, end: _CodePositionT) -> None: if isinstance(start, tuple) and isinstance(end, tuple): diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 742d9f10..e6322353 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -207,6 +207,7 @@ if TYPE_CHECKING: class CSTTypedBaseFunctions: + @mark_no_op def visit_Add(self, node: "Add") -> Optional[bool]: pass @@ -5763,6 +5764,7 @@ class CSTTypedBaseFunctions: class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): + @mark_no_op def leave_Add(self, original_node: "Add") -> None: pass @@ -6441,6 +6443,7 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): + @mark_no_op def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp": return updated_node diff --git a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py index 91e78048..f1de5b0c 100644 --- a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py @@ -25,7 +25,9 @@ class ConvertNamedTupleToDataclassCommand(VisitorBasedCodemodCommand): NamedTuple-specific attributes and methods. """ - DESCRIPTION: str = "Convert NamedTuple class declarations to Python 3.7 dataclasses using the @dataclass decorator." + DESCRIPTION: str = ( + "Convert NamedTuple class declarations to Python 3.7 dataclasses using the @dataclass decorator." 
+ ) METADATA_DEPENDENCIES: Sequence[ProviderT] = (QualifiedNameProvider,) # The 'NamedTuple' we are interested in diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index 9908a5b6..501c9621 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -97,9 +97,11 @@ class ConvertPercentFormatStringCommand(VisitorBasedCodemodCommand): parts.append(cst.FormattedStringText(value=token)) expressions: List[cst.CSTNode] = list( *itertools.chain( - [elm.value for elm in expr.elements] - if isinstance(expr, cst.Tuple) - else [expr] + ( + [elm.value for elm in expr.elements] + if isinstance(expr, cst.Tuple) + else [expr] + ) for expr in exprs ) ) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 4bd0ee3d..290f1ac1 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -144,9 +144,9 @@ class RenameCommand(VisitorBasedCodemodCommand): self.bypass_import = True if replacement_module != import_alias_full_name: self.scheduled_removals.add(original_node) - new_name_node: Union[ - cst.Attribute, cst.Name - ] = self.gen_name_or_attr_node(replacement_module) + new_name_node: Union[cst.Attribute, cst.Name] = ( + self.gen_name_or_attr_node(replacement_module) + ) new_names.append(cst.ImportAlias(name=new_name_node)) else: new_names.append(import_alias) @@ -198,9 +198,9 @@ class RenameCommand(VisitorBasedCodemodCommand): self.scheduled_removals.add(original_node) continue - new_import_alias_name: Union[ - cst.Attribute, cst.Name - ] = self.gen_name_or_attr_node(replacement_obj) + new_import_alias_name: Union[cst.Attribute, cst.Name] = ( + self.gen_name_or_attr_node(replacement_obj) + ) # Rename on the spot only if this is the only imported name under the module. if len(names) == 1: updated_node = updated_node.with_changes( diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index 2e03416f..811fed62 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -48,8 +48,7 @@ class GenCacheMethod(Protocol): *, timeout: Optional[int] = None, use_pyproject_toml: bool = False, - ) -> Mapping[str, object]: - ... + ) -> Mapping[str, object]: ... # We can't use an ABCMeta here, because of metaclass conflicts diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 75f37a06..ac9c4b9d 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -199,8 +199,7 @@ class BaseAssignment(abc.ABC): return -1 @abc.abstractmethod - def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: - ... + def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: ... 
class Assignment(BaseAssignment): @@ -225,9 +224,11 @@ class Assignment(BaseAssignment): def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: return { QualifiedName( - f"{self.scope._name_prefix}.{full_name}" - if self.scope._name_prefix - else full_name, + ( + f"{self.scope._name_prefix}.{full_name}" + if self.scope._name_prefix + else full_name + ), QualifiedNameSource.LOCAL, ) } @@ -306,9 +307,11 @@ class ImportAssignment(Assignment): remaining_name = remaining_name.lstrip(".") results.add( QualifiedName( - f"{real_name}.{remaining_name}" - if remaining_name - else real_name, + ( + f"{real_name}.{remaining_name}" + if remaining_name + else real_name + ), QualifiedNameSource.IMPORT, ) ) @@ -503,19 +506,16 @@ class Scope(abc.ABC): @abc.abstractmethod def _resolve_scope_for_access( self, name: str, from_scope: "Scope" - ) -> Set[BaseAssignment]: - ... + ) -> Set[BaseAssignment]: ... def __hash__(self) -> int: return id(self) @abc.abstractmethod - def record_global_overwrite(self, name: str) -> None: - ... + def record_global_overwrite(self, name: str) -> None: ... @abc.abstractmethod - def record_nonlocal_overwrite(self, name: str) -> None: - ... + def record_nonlocal_overwrite(self, name: str) -> None: ... def get_qualified_names_for( self, node: Union[str, cst.CSTNode] diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index a2087645..fd23e993 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -653,12 +653,16 @@ class ScopeProviderTest(UnitTest): for assignment in scope_of_outer_f["var"] }, { - outer_f_body_var.targets[0].target - if isinstance(outer_f_body_var, cst.Assign) - else outer_f_body_var, - inner_f_body_var.targets[0].target - if isinstance(inner_f_body_var, cst.Assign) - else inner_f_body_var, + ( + outer_f_body_var.targets[0].target + if isinstance(outer_f_body_var, cst.Assign) + else outer_f_body_var + ), + ( + inner_f_body_var.targets[0].target + if isinstance(inner_f_body_var, cst.Assign) + else inner_f_body_var + ), }, ) diff --git a/libcst/tool.py b/libcst/tool.py index 3c00ba8d..ace15ff6 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -461,8 +461,7 @@ class _SerializerBase(ABC): return f"{comments}{os.linesep}{self._serialize_impl(key, value)}{os.linesep}" @abstractmethod - def _serialize_impl(self, key: str, value: object) -> str: - ... + def _serialize_impl(self, key: str, value: object) -> str: ... class _StrSerializer(_SerializerBase): diff --git a/pyproject.toml b/pyproject.toml index 16123a81..7ff3421a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ dependencies = ["pyyaml>=5.2"] [project.optional-dependencies] dev = [ - "black==23.12.1", + "black==24.8.0", "coverage[toml]>=4.5.4", "build>=0.10.0", "fixit==2.1.0", From 2e496954276e639d9d8229a4d99b55821f2d7e61 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:21:43 -0700 Subject: [PATCH 500/632] Bump memchr from 2.5.0 to 2.7.4 in /native (#1165) Bumps [memchr](https://github.com/BurntSushi/memchr) from 2.5.0 to 2.7.4. - [Commits](https://github.com/BurntSushi/memchr/compare/2.5.0...2.7.4) --- updated-dependencies: - dependency-name: memchr dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index e62007e4..227f0a22 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -407,9 +407,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.5.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memoffset" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 81f3bd88..95cb4f1e 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -41,7 +41,7 @@ thiserror = "1.0.37" peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" -memchr = "2.5.0" +memchr = "2.7.4" libcst_derive = { path = "../libcst_derive", version = "1.4.0" } [dev-dependencies] From b552469f1c413e69e8840ff067f5c7dbc1e9d245 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:21:56 -0700 Subject: [PATCH 501/632] Bump itertools from 0.11.0 to 0.13.0 in /native (#1150) Bumps [itertools](https://github.com/rust-itertools/itertools) from 0.11.0 to 0.13.0. - [Changelog](https://github.com/rust-itertools/itertools/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-itertools/itertools/compare/v0.11.0...v0.13.0) --- updated-dependencies: - dependency-name: itertools dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 6 +++--- native/libcst/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 227f0a22..1685477e 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -319,9 +319,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.11.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] @@ -360,7 +360,7 @@ dependencies = [ "chic", "criterion", "difference", - "itertools 0.11.0", + "itertools 0.13.0", "libcst_derive", "memchr", "paste", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 95cb4f1e..eab93261 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -48,7 +48,7 @@ libcst_derive = { path = "../libcst_derive", version = "1.4.0" } criterion = { version = "0.5.1", features = ["html_reports"] } difference = "2.0.0" rayon = "1.7.0" -itertools = "0.11.0" +itertools = "0.13.0" [[bench]] name = "parser_benchmark" From 6017c40d19e54e51bba6a79280525a15a8d7de22 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:22:20 -0700 Subject: [PATCH 502/632] Bump paste from 1.0.9 to 1.0.15 in /native (#1146) Bumps [paste](https://github.com/dtolnay/paste) from 1.0.9 to 1.0.15. 
- [Release notes](https://github.com/dtolnay/paste/releases) - [Commits](https://github.com/dtolnay/paste/compare/1.0.9...1.0.15) --- updated-dependencies: - dependency-name: paste dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 1685477e..b2ad9036 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -487,9 +487,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.9" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "peg" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index eab93261..47d2ab33 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -35,7 +35,7 @@ py = ["pyo3", "pyo3/extension-module"] trace = ["peg/trace"] [dependencies] -paste = "1.0.9" +paste = "1.0.15" pyo3 = { version = "0.20", optional = true } thiserror = "1.0.37" peg = "0.8.1" From 07ec61d8b007d8fafa999e4807e69a72855e98ab Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:22:34 -0700 Subject: [PATCH 503/632] Bump thiserror from 1.0.37 to 1.0.55 in /native (#1086) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.37 to 1.0.55. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.37...1.0.55) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 10 +++++----- native/libcst/Cargo.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index b2ad9036..1a72cd4f 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -795,22 +795,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.37" +version = "1.0.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" +checksum = "6e3de26b0965292219b4287ff031fcba86837900fe9cd2b34ea8ad893c0953d2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.37" +version = "1.0.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" +checksum = "268026685b2be38d7103e9e507c938a1fcb3d7e6eb15e87870b617bf37b6d581" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.41", ] [[package]] diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 47d2ab33..5be6e7fb 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -37,7 +37,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" pyo3 = { version = "0.20", optional = true } -thiserror = "1.0.37" +thiserror = "1.0.55" peg = "0.8.1" chic = "1.2.2" regex = "1.9.3" From 9f198179f3980b3fa417ade3f265576dcf5cd442 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:23:25 -0700 Subject: [PATCH 504/632] Bump ts-graphviz/setup-graphviz from 1 to 2 (#1105) Bumps [ts-graphviz/setup-graphviz](https://github.com/ts-graphviz/setup-graphviz) from 1 to 2. - [Release notes](https://github.com/ts-graphviz/setup-graphviz/releases) - [Commits](https://github.com/ts-graphviz/setup-graphviz/compare/v1...v2) --- updated-dependencies: - dependency-name: ts-graphviz/setup-graphviz dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5c43745c..84a3e3d6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,7 +90,7 @@ jobs: python-version: "3.10" - name: Install hatch run: pip install -U hatch - - uses: ts-graphviz/setup-graphviz@v1 + - uses: ts-graphviz/setup-graphviz@v2 - run: hatch run docs - name: Archive Docs uses: actions/upload-artifact@v4 From 38cc0798b2c5384de06f05e52a857c367417b5be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:26:14 -0700 Subject: [PATCH 505/632] Bump trybuild from 1.0.71 to 1.0.86 in /native (#1076) Bumps [trybuild](https://github.com/dtolnay/trybuild) from 1.0.71 to 1.0.86. - [Release notes](https://github.com/dtolnay/trybuild/releases) - [Commits](https://github.com/dtolnay/trybuild/compare/1.0.71...1.0.86) --- updated-dependencies: - dependency-name: trybuild dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 57 ++++++++++++++++++++++++----------------------- 1 file changed, 29 insertions(+), 28 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 1a72cd4f..af9db1c7 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -35,6 +35,15 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "basic-toml" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8" +dependencies = [ + "serde", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -376,7 +385,7 @@ name = "libcst_derive" version = "1.4.0" dependencies = [ "quote", - "syn 2.0.41", + "syn 2.0.75", "trybuild", ] @@ -548,9 +557,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.70" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -601,7 +610,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.41", + "syn 2.0.75", ] [[package]] @@ -613,14 +622,14 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.75", ] [[package]] name = "quote" -version = "1.0.33" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] @@ -721,31 +730,32 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.145" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" +checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.145" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" +checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.75", ] [[package]] name = "serde_json" -version = "1.0.81" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" +checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] @@ -769,9 +779,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.41" +version = "2.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" +checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9" dependencies = [ "proc-macro2", "quote", @@ -823,28 +833,19 @@ dependencies = [ "serde_json", ] -[[package]] -name = "toml" -version = "0.5.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" -dependencies = [ - "serde", -] - [[package]] name = "trybuild" -version = "1.0.71" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea496675d71016e9bc76aa42d87f16aefd95447cc5818e671e12b2d7e269075d" +checksum = "8419ecd263363827c5730386f418715766f584e2f874d32c23c5b00bd9727e7e" dependencies = [ + "basic-toml", "glob", "once_cell", "serde", "serde_derive", "serde_json", "termcolor", - "toml", ] [[package]] From 47b171b9a7d341d9eafda6d904984fd89294d99d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:40:50 -0700 Subject: [PATCH 506/632] Bump rayon from 1.7.0 to 1.10.0 in /native (#1193) Bumps [rayon](https://github.com/rayon-rs/rayon) from 1.7.0 to 1.10.0. - [Changelog](https://github.com/rayon-rs/rayon/blob/main/RELEASES.md) - [Commits](https://github.com/rayon-rs/rayon/compare/rayon-core-v1.7.0...rayon-core-v1.10.0) --- updated-dependencies: - dependency-name: rayon dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 43 ++++++---------------------------------- native/libcst/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 38 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index af9db1c7..d805ff11 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -180,16 +180,6 @@ dependencies = [ "itertools 0.10.5", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - [[package]] name = "crossbeam-deque" version = "0.8.1" @@ -276,15 +266,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.2" @@ -312,7 +293,7 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "rustix", "windows-sys", ] @@ -447,16 +428,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "num_cpus" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi 0.1.19", - "libc", -] - [[package]] name = "once_cell" version = "1.16.0" @@ -636,9 +607,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.7.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -646,14 +617,12 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] @@ -820,7 +789,7 @@ checksum = "268026685b2be38d7103e9e507c938a1fcb3d7e6eb15e87870b617bf37b6d581" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.75", ] [[package]] diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 5be6e7fb..ec7b5897 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -47,7 +47,7 @@ libcst_derive = { path = "../libcst_derive", version = "1.4.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } difference = "2.0.0" -rayon = "1.7.0" +rayon = "1.10.0" itertools = "0.13.0" [[bench]] From 77e2a51d35d671e2061ef036bd03ad01562658e1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:42:44 -0700 Subject: [PATCH 507/632] Bump peg from 0.8.1 to 0.8.4 in /native (#1197) Bumps [peg](https://github.com/kevinmehall/rust-peg) from 0.8.1 to 0.8.4. - [Release notes](https://github.com/kevinmehall/rust-peg/releases) - [Commits](https://github.com/kevinmehall/rust-peg/compare/0.8.1...0.8.4) --- updated-dependencies: - dependency-name: peg dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ native/libcst/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index d805ff11..ff5cb7c7 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -473,9 +473,9 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "peg" -version = "0.8.1" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a07f2cafdc3babeebc087e499118343442b742cc7c31b4d054682cc598508554" +checksum = "295283b02df346d1ef66052a757869b2876ac29a6bb0ac3f5f7cd44aebe40e8f" dependencies = [ "peg-macros", "peg-runtime", @@ -483,9 +483,9 @@ dependencies = [ [[package]] name = "peg-macros" -version = "0.8.1" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a90084dc05cf0428428e3d12399f39faad19b0909f64fb9170c9fdd6d9cd49b" +checksum = "bdad6a1d9cf116a059582ce415d5f5566aabcd4008646779dab7fdc2a9a9d426" dependencies = [ "peg-runtime", "proc-macro2", @@ -494,9 +494,9 @@ dependencies = [ [[package]] name = "peg-runtime" -version = "0.8.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa00462b37ead6d11a82c9d568b26682d78e0477dc02d1966c013af80969739" +checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a" [[package]] name = "plotters" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index ec7b5897..249d2023 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -38,7 +38,7 @@ trace = ["peg/trace"] paste = "1.0.15" pyo3 = { version = "0.20", optional = true } thiserror = "1.0.55" -peg = "0.8.1" +peg = "0.8.4" chic = "1.2.2" regex = "1.9.3" memchr = "2.7.4" From 8c5aa32000485f1f5352a059bebe2c624a8de67d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 18:47:27 -0700 Subject: [PATCH 508/632] Bump thiserror from 1.0.55 to 1.0.63 in /native (#1196) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.55 to 1.0.63. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.55...1.0.63) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index ff5cb7c7..620e9b6b 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -774,18 +774,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.55" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e3de26b0965292219b4287ff031fcba86837900fe9cd2b34ea8ad893c0953d2" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.55" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "268026685b2be38d7103e9e507c938a1fcb3d7e6eb15e87870b617bf37b6d581" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 249d2023..51b15799 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -37,7 +37,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" pyo3 = { version = "0.20", optional = true } -thiserror = "1.0.55" +thiserror = "1.0.63" peg = "0.8.4" chic = "1.2.2" regex = "1.9.3" From ccf9623ccf03ca905db5bddcc949023f29f836f1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 18:47:34 -0700 Subject: [PATCH 509/632] Bump trybuild from 1.0.86 to 1.0.99 in /native (#1194) Bumps [trybuild](https://github.com/dtolnay/trybuild) from 1.0.86 to 1.0.99. - [Release notes](https://github.com/dtolnay/trybuild/releases) - [Commits](https://github.com/dtolnay/trybuild/compare/1.0.86...1.0.99) --- updated-dependencies: - dependency-name: trybuild dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 92 +++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 78 insertions(+), 14 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 620e9b6b..09d04218 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -35,15 +35,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "basic-toml" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8" -dependencies = [ - "serde", -] - [[package]] name = "bitflags" version = "1.3.2" @@ -227,6 +218,12 @@ version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "errno" version = "0.3.2" @@ -260,6 +257,12 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + [[package]] name = "heck" version = "0.4.1" @@ -272,6 +275,16 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +[[package]] +name = "indexmap" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" +dependencies = [ + "equivalent", + "hashbrown", +] + [[package]] name = "indoc" version = "2.0.4" @@ -729,6 +742,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +dependencies = [ + "serde", +] + [[package]] name = "smallvec" version = "1.8.1" @@ -803,18 +825,51 @@ dependencies = [ ] [[package]] -name = "trybuild" -version = "1.0.86" +name = "toml" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8419ecd263363827c5730386f418715766f584e2f874d32c23c5b00bd9727e7e" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "trybuild" +version = "1.0.99" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "207aa50d36c4be8d8c6ea829478be44a372c6a77669937bb39c698e52f1491e8" dependencies = [ - "basic-toml", "glob", - "once_cell", "serde", "serde_derive", "serde_json", "termcolor", + "toml", ] [[package]] @@ -1000,3 +1055,12 @@ name = "windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "winnow" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +dependencies = [ + "memchr", +] From 9834694730b868fa36688bad0d429fac8b758990 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 10:26:36 -0700 Subject: [PATCH 510/632] Bump regex from 1.9.3 to 1.10.6 in /native (#1198) Bumps [regex](https://github.com/rust-lang/regex) from 1.9.3 to 1.10.6. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.9.3...1.10.6) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ native/libcst/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 09d04218..e26b3353 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -649,9 +649,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.3" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", @@ -661,9 +661,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", @@ -672,9 +672,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "rustix" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 51b15799..11e3e362 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -40,7 +40,7 @@ pyo3 = { version = "0.20", optional = true } thiserror = "1.0.63" peg = "0.8.4" chic = "1.2.2" -regex = "1.9.3" +regex = "1.10.6" memchr = "2.7.4" libcst_derive = { path = "../libcst_derive", version = "1.4.0" } From 61b9ac3a6872d97cfa72bcb02dea1a6ca73bf73e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 07:37:01 +0100 Subject: [PATCH 511/632] Bump pypa/cibuildwheel from 2.20.0 to 2.21.0 (#1206) Bumps 
[pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.20.0 to 2.21.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.20.0...v2.21.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3a57330a..060b3caa 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.20.0 + uses: pypa/cibuildwheel@v2.21.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 0974a416a79b3f1074e838ee8340e412e2e5e1b7 Mon Sep 17 00:00:00 2001 From: Wim Jeantine-Glenn Date: Wed, 18 Sep 2024 03:23:30 -0500 Subject: [PATCH 512/632] Typo fix in codemods_tutorial.rst (trivial) (#1208) --- docs/source/codemods_tutorial.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/codemods_tutorial.rst b/docs/source/codemods_tutorial.rst index 8fc71bea..6f657fbc 100644 --- a/docs/source/codemods_tutorial.rst +++ b/docs/source/codemods_tutorial.rst @@ -26,7 +26,7 @@ then edit the produced ``.libcst.codemod.yaml`` file:: python3 -m libcst.tool initialize . The file includes provisions for customizing any generated code marker, calling an -external code formatter such as `black `_, blackisting +external code formatter such as `black `_, blacklisting patterns of files you never wish to touch and a list of modules that contain valid codemods that can be executed. If you want to write and run codemods specific to your repository or organization, you can add an in-repo module location to the list of From 6a059bec9a41e787f2c0158a2c3680a81d94a99d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 11:22:57 +0100 Subject: [PATCH 513/632] Bump pypa/cibuildwheel from 2.21.0 to 2.21.1 (#1211) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.0 to 2.21.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.21.0...v2.21.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 060b3caa..da29a104 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.21.0 + uses: pypa/cibuildwheel@v2.21.1 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 9fd67bca49fbb45b9572c5c2bf5131f2c05245cd Mon Sep 17 00:00:00 2001 From: Kirill Ignatev Date: Wed, 25 Sep 2024 06:29:54 -0400 Subject: [PATCH 514/632] fix certain matchers breaking under multiprocessing by initializing them late (#1204) * Add is_property check Skip properties to prevent exceptions * Delayed initialization of matchers To support multiprocessing on Windows/macOS Issue #1181 * Add a test for matcher decorators with multiprocessing --- libcst/codemod/tests/test_codemod_cli.py | 31 ++++++++ libcst/matchers/_visitors.py | 95 +++++++++++++----------- 2 files changed, 83 insertions(+), 43 deletions(-) diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 0fa3dbef..8773cd77 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -93,3 +93,34 @@ class TestCodemodCLI(UnitTest): "- 3 warnings were generated.", output.stderr, ) + + def test_matcher_decorators_multiprocessing(self) -> None: + file_count = 5 + code = """ + def baz(): # type: int + return 5 + """ + with tempfile.TemporaryDirectory() as tmpdir: + p = Path(tmpdir) + # Using more than chunksize=4 files to trigger multiprocessing + for i in range(file_count): + (p / f"mod{i}.py").write_text(CodemodTest.make_fixture_data(code)) + output = subprocess.run( + [ + sys.executable, + "-m", + "libcst.tool", + "codemod", + # Good candidate since it uses matcher decorators + "convert_type_comments.ConvertTypeComments", + str(p), + "--jobs", + str(file_count), + ], + encoding="utf-8", + stderr=subprocess.PIPE, + ) + self.assertIn( + f"Transformed {file_count} files successfully.", + output.stderr, + ) diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index 9349c5b5..b9252173 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -60,6 +60,11 @@ CONCRETE_METHODS: Set[str] = { } +def is_property(obj: object, attr_name: str) -> bool: + """Check if obj.attr is a property without evaluating it.""" + return isinstance(getattr(type(obj), attr_name, None), property) + + # pyre-ignore We don't care about Any here, its not exposed. def _match_decorator_unpickler(kwargs: Any) -> "MatchDecoratorMismatch": return MatchDecoratorMismatch(**kwargs) @@ -265,20 +270,22 @@ def _check_types( ) -def _gather_matchers(obj: object) -> Set[BaseMatcherNode]: - visit_matchers: Set[BaseMatcherNode] = set() +def _gather_matchers(obj: object) -> Dict[BaseMatcherNode, Optional[cst.CSTNode]]: + """ + Set of gating matchers that we need to track and evaluate. We use these + in conjunction with the call_if_inside and call_if_not_inside decorators + to determine whether to call a visit/leave function. 
+ """ - for func in dir(obj): - try: - for matcher in getattr(getattr(obj, func), VISIT_POSITIVE_MATCHER_ATTR, []): - visit_matchers.add(cast(BaseMatcherNode, matcher)) - for matcher in getattr(getattr(obj, func), VISIT_NEGATIVE_MATCHER_ATTR, []): - visit_matchers.add(cast(BaseMatcherNode, matcher)) - except Exception: - # This could be a caculated property, and calling getattr() evaluates it. - # We have no control over the implementation detail, so if it raises, we - # should not crash. - pass + visit_matchers: Dict[BaseMatcherNode, Optional[cst.CSTNode]] = {} + + for attr_name in dir(obj): + if not is_property(obj, attr_name): + func = getattr(obj, attr_name) + for matcher in getattr(func, VISIT_POSITIVE_MATCHER_ATTR, []): + visit_matchers[cast(BaseMatcherNode, matcher)] = None + for matcher in getattr(func, VISIT_NEGATIVE_MATCHER_ATTR, []): + visit_matchers[cast(BaseMatcherNode, matcher)] = None return visit_matchers @@ -302,16 +309,12 @@ def _gather_constructed_visit_funcs( ] = {} for funcname in dir(obj): - try: - possible_func = getattr(obj, funcname) - if not ismethod(possible_func): - continue - func = cast(Callable[[cst.CSTNode], None], possible_func) - except Exception: - # This could be a caculated property, and calling getattr() evaluates it. - # We have no control over the implementation detail, so if it raises, we - # should not crash. + if is_property(obj, funcname): continue + possible_func = getattr(obj, funcname) + if not ismethod(possible_func): + continue + func = cast(Callable[[cst.CSTNode], None], possible_func) matchers = getattr(func, CONSTRUCTED_VISIT_MATCHER_ATTR, []) if matchers: # Make sure that we aren't accidentally putting a @visit on a visit_Node. @@ -337,16 +340,12 @@ def _gather_constructed_leave_funcs( ] = {} for funcname in dir(obj): - try: - possible_func = getattr(obj, funcname) - if not ismethod(possible_func): - continue - func = cast(Callable[[cst.CSTNode], None], possible_func) - except Exception: - # This could be a caculated property, and calling getattr() evaluates it. - # We have no control over the implementation detail, so if it raises, we - # should not crash. + if is_property(obj, funcname): continue + possible_func = getattr(obj, funcname) + if not ismethod(possible_func): + continue + func = cast(Callable[[cst.CSTNode], None], possible_func) matchers = getattr(func, CONSTRUCTED_LEAVE_MATCHER_ATTR, []) if matchers: # Make sure that we aren't accidentally putting a @leave on a leave_Node. @@ -448,12 +447,7 @@ class MatcherDecoratableTransformer(CSTTransformer): def __init__(self) -> None: CSTTransformer.__init__(self) - # List of gating matchers that we need to track and evaluate. We use these - # in conjuction with the call_if_inside and call_if_not_inside decorators - # to determine whether or not to call a visit/leave function. - self._matchers: Dict[BaseMatcherNode, Optional[cst.CSTNode]] = { - m: None for m in _gather_matchers(self) - } + self.__matchers: Optional[Dict[BaseMatcherNode, Optional[cst.CSTNode]]] = None # Mapping of matchers to functions. If in the course of visiting the tree, # a node matches one of these matchers, the corresponding function will be # called as if it was a visit_* method. 
@@ -486,6 +480,16 @@ class MatcherDecoratableTransformer(CSTTransformer): expected_none_return=False, ) + @property + def _matchers(self) -> Dict[BaseMatcherNode, Optional[cst.CSTNode]]: + if self.__matchers is None: + self.__matchers = _gather_matchers(self) + return self.__matchers + + @_matchers.setter + def _matchers(self, value: Dict[BaseMatcherNode, Optional[cst.CSTNode]]) -> None: + self.__matchers = value + def on_visit(self, node: cst.CSTNode) -> bool: # First, evaluate any matchers that we have which we are not inside already. self._matchers = _visit_matchers(self._matchers, node, self) @@ -660,12 +664,7 @@ class MatcherDecoratableVisitor(CSTVisitor): def __init__(self) -> None: CSTVisitor.__init__(self) - # List of gating matchers that we need to track and evaluate. We use these - # in conjuction with the call_if_inside and call_if_not_inside decorators - # to determine whether or not to call a visit/leave function. - self._matchers: Dict[BaseMatcherNode, Optional[cst.CSTNode]] = { - m: None for m in _gather_matchers(self) - } + self.__matchers: Optional[Dict[BaseMatcherNode, Optional[cst.CSTNode]]] = None # Mapping of matchers to functions. If in the course of visiting the tree, # a node matches one of these matchers, the corresponding function will be # called as if it was a visit_* method. @@ -693,6 +692,16 @@ class MatcherDecoratableVisitor(CSTVisitor): expected_none_return=True, ) + @property + def _matchers(self) -> Dict[BaseMatcherNode, Optional[cst.CSTNode]]: + if self.__matchers is None: + self.__matchers = _gather_matchers(self) + return self.__matchers + + @_matchers.setter + def _matchers(self, value: Dict[BaseMatcherNode, Optional[cst.CSTNode]]) -> None: + self.__matchers = value + def on_visit(self, node: cst.CSTNode) -> bool: # First, evaluate any matchers that we have which we are not inside already. self._matchers = _visit_matchers(self._matchers, node, self) From 586b4d74e4e2b1ddc483cf2538d0b9d6eebcfe0f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 11:32:36 +0100 Subject: [PATCH 515/632] Bump ufmt from 2.7.0 to 2.7.3 (#1212) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.7.0 to 2.7.3. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.7.0...v2.7.3) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7ff3421a..af9328f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ dev = [ "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.7.0", + "ufmt==2.7.3", "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From b49e705579ab47498d542771a8e704a070638474 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 09:22:46 +0100 Subject: [PATCH 516/632] Bump pypa/cibuildwheel from 2.21.1 to 2.21.2 (#1218) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.1 to 2.21.2. 
- [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.21.1...v2.21.2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index da29a104..b780639b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.21.1 + uses: pypa/cibuildwheel@v2.21.2 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From a2b3456fe9f205b24de3dffb4750d4c70a8e4f0c Mon Sep 17 00:00:00 2001 From: khameeteman <73279381+khameeteman@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:38:27 +0200 Subject: [PATCH 517/632] include python 3.13 in build (#1203) --- .github/workflows/ci.yml | 2 +- pyproject.toml | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 84a3e3d6..e8ff9207 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,7 @@ jobs: fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 with: diff --git a/pyproject.toml b/pyproject.toml index af9328f3..79ce0ba1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools", "setuptools-scm", "setuptools-rust", "wheel"] [project] name = "libcst" -description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.12 programs." +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs." 
readme = "README.rst" dynamic = ["version"] license = { file = "LICENSE" } @@ -14,6 +14,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] requires-python = ">=3.9" dependencies = ["pyyaml>=5.2"] From dde88a2082fa99b9fb8c5d2544c3eadfb63815fc Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Oct 2024 10:53:01 +0100 Subject: [PATCH 518/632] add changelog entry --- CHANGELOG.md | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1bccbc23..ee0f25b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,32 @@ +# 1.5.0 - 2024-10-10 + +## Added +* FullyQualifiedNameProvider: Optionally consider pyproject.toml files when determining a file's module name and package by @camillol in https://github.com/Instagram/LibCST/pull/1148 +* Add validation for If node by @kiri11 in https://github.com/Instagram/LibCST/pull/1177 +* include python 3.13 in build by @khameeteman in https://github.com/Instagram/LibCST/pull/1203 + +## Fixed +* fix various Match statement visitation errors by @zsol in https://github.com/Instagram/LibCST/pull/1161 +* Mention codemod -x flag in docs by @kiri11 in https://github.com/Instagram/LibCST/pull/1169 +* Clear warnings for each file in codemod cli by @kiri11 in https://github.com/Instagram/LibCST/pull/1184 +* Typo fix in codemods_tutorial.rst (trivial) by @wimglenn in https://github.com/Instagram/LibCST/pull/1208 +* fix certain matchers breaking under multiprocessing by initializing them late by @kiri11 in https://github.com/Instagram/LibCST/pull/1204 + +## Updated +* make libcst_native::tokenizer public by @zsol in https://github.com/Instagram/LibCST/pull/1182 +* Use `license` instead of `license-file` by @michel-slm in https://github.com/Instagram/LibCST/pull/1189 +* Drop codecov from CI and readme by @amyreese in https://github.com/Instagram/LibCST/pull/1192 + + +## New Contributors +* @kiri11 made their first contribution in https://github.com/Instagram/LibCST/pull/1169 +* @grievejia made their first contribution in https://github.com/Instagram/LibCST/pull/1174 +* @michel-slm made their first contribution in https://github.com/Instagram/LibCST/pull/1189 +* @wimglenn made their first contribution in https://github.com/Instagram/LibCST/pull/1208 +* @khameeteman made their first contribution in https://github.com/Instagram/LibCST/pull/1203 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.4.0...v1.5.0 + # 1.4.0 - 2024-05-22 ## Fixed From 527a4b04e15d7e98d2b9c46c68594d8c8a233208 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Oct 2024 10:54:15 +0100 Subject: [PATCH 519/632] bump versions in cargo.toml --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- native/libcst_derive/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index e26b3353..958df42c 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -358,7 +358,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.4.0" +version = "1.5.0" dependencies = [ "chic", "criterion", @@ -376,7 +376,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.4.0" +version = "1.5.0" dependencies = [ "quote", "syn 2.0.75", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 11e3e362..a1aaadad 100644 --- 
a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.4.0" +version = "1.5.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 47d7ca85..7566c87d 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.4.0" +version = "1.5.0" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From 42df0881baf0e7a388ecaec0bf4cc28ab758f01c Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 10 Oct 2024 11:20:25 +0100 Subject: [PATCH 520/632] Fix doc build error (#1221) Apparently doc2path now returns a path not a string --- docs/source/conf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index c210fc1d..d3311e90 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -196,6 +196,7 @@ intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True + # -- autodoc customization def strip_class_signature(app, what, name, obj, options, signature, return_annotation): if what == "class": @@ -218,7 +219,7 @@ def setup(app): nbsphinx_prolog = r""" -{% set docname = 'docs/source/' + env.doc2path(env.docname, base=None) %} +{% set docname = 'docs/source/' + env.doc2path(env.docname, base=None)|string%} .. only:: html From bfd1000289296436e9364013d1ce08808cd19da4 Mon Sep 17 00:00:00 2001 From: Blazej Michalik <6691643+MrMino@users.noreply.github.com> Date: Sun, 17 Nov 2024 11:19:27 +0100 Subject: [PATCH 521/632] ci: build wheels for musllinux (#1243) --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b780639b..0d3d7698 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,7 +20,7 @@ jobs: CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' - CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" + CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64" CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 1 steps: From 4ff38c039e075d5e3a8ebdb81ff2c8419c281066 Mon Sep 17 00:00:00 2001 From: Blazej Michalik <6691643+MrMino@users.noreply.github.com> Date: Sun, 17 Nov 2024 19:01:34 +0100 Subject: [PATCH 522/632] ci: skip musllinux builds for unsupported archs (#1244) This fixes current CI failures by skipping Musl builds for `i686`, `ppc64le`, `s390x`, and `armv7le` architectures. The failures are due to Rust ecosystem having only partial support / not having tool chains for these architectures. For the list of supported archs and tiers of support, see: https://doc.rust-lang.org/nightly/rustc/platform-support.html The architectures skipped here are either, from the Rust PoV: - Tier-2 support without host tools. - Tier-3 support without host tools. 
--- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0d3d7698..81381f50 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,7 +20,7 @@ jobs: CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' - CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64" + CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_i686 *-musllinux_ppc64le *-musllinux_s390x *-musllinux_armv7l" CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 1 steps: From 4aa92f3857afb79b790d112da80d36fe93bd57ed Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 18 Nov 2024 20:19:01 +0000 Subject: [PATCH 523/632] Bump version to 1.5.1 (#1246) --- CHANGELOG.md | 11 +++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ee0f25b6..8e9d5aab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +# 1.5.1 - 2024-11-18 + +## Added + +* build wheels for musllinux by @MrMino in https://github.com/Instagram/LibCST/pull/1243 + +## New Contributors +* @MrMino made their first contribution in https://github.com/Instagram/LibCST/pull/1243 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.5.0...v1.5.1 + # 1.5.0 - 2024-10-10 ## Added diff --git a/native/Cargo.lock b/native/Cargo.lock index 958df42c..00409e83 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -358,7 +358,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.5.0" +version = "1.5.1" dependencies = [ "chic", "criterion", @@ -376,7 +376,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.5.0" +version = "1.5.1" dependencies = [ "quote", "syn 2.0.75", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index a1aaadad..7cdcc61e 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.5.0" +version = "1.5.1" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 7566c87d..9ede217d 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.5.0" +version = "1.5.1" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From 08da127e5434eb42bae5dc71c03363d6a281d4bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Nov 2024 12:27:11 +0000 Subject: [PATCH 524/632] Bump pypa/cibuildwheel from 2.21.2 to 2.22.0 (#1247) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.2 to 2.22.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.21.2...v2.22.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 81381f50..886947d0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.21.2 + uses: pypa/cibuildwheel@v2.22.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 6fdca74c90f790728f20b9710d333f523e3970c6 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 28 Nov 2024 14:59:43 +0000 Subject: [PATCH 525/632] rename: store state in scratch (#1250) This PR changes RenameCodemod to store its per-module state in `self.context.scratch` which gets properly reset between files. --- libcst/codemod/commands/rename.py | 38 +++++++++++++++++++++++++------ 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 290f1ac1..9d710cca 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -92,14 +92,38 @@ class RenameCommand(VisitorBasedCodemodCommand): self.old_module: str = old_module self.old_mod_or_obj: str = old_mod_or_obj - self.as_name: Optional[Tuple[str, str]] = None + @property + def as_name(self) -> Optional[Tuple[str, str]]: + if "as_name" not in self.context.scratch: + self.context.scratch["as_name"] = None + return self.context.scratch["as_name"] - # A set of nodes that have been renamed to help with the cleanup of now potentially unused - # imports, during import cleanup in `leave_Module`. - self.scheduled_removals: Set[cst.CSTNode] = set() - # If an import has been renamed while inside an `Import` or `ImportFrom` node, we want to flag - # this so that we do not end up with two of the same import. 
- self.bypass_import = False + @as_name.setter + def as_name(self, value: Optional[Tuple[str, str]]) -> None: + self.context.scratch["as_name"] = value + + @property + def scheduled_removals(self) -> Set[cst.CSTNode]: + """A set of nodes that have been renamed to help with the cleanup of now potentially unused + imports, during import cleanup in `leave_Module`.""" + if "scheduled_removals" not in self.context.scratch: + self.context.scratch["scheduled_removals"] = set() + return self.context.scratch["scheduled_removals"] + + @scheduled_removals.setter + def scheduled_removals(self, value: Set[cst.CSTNode]) -> None: + self.context.scratch["scheduled_removals"] = value + + @property + def bypass_import(self) -> bool: + """A flag to indicate that an import has been renamed while inside an `Import` or `ImportFrom` node.""" + if "bypass_import" not in self.context.scratch: + self.context.scratch["bypass_import"] = False + return self.context.scratch["bypass_import"] + + @bypass_import.setter + def bypass_import(self, value: bool) -> None: + self.context.scratch["bypass_import"] = value def visit_Import(self, node: cst.Import) -> None: for import_alias in node.names: From 28e0f397b278f061f3c6cef9bf80a0422b7b447e Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 28 Nov 2024 20:02:23 +0000 Subject: [PATCH 526/632] rename: handle imports via a parent module (#1251) When requesting a rename for `a.b.c`, we want to act on `import a` when it's used to access `a.b.c` --- libcst/codemod/commands/rename.py | 48 ++++++++++---------- libcst/codemod/commands/tests/test_rename.py | 36 +++++++++++++++ 2 files changed, 60 insertions(+), 24 deletions(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 9d710cca..ae7138c8 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -142,29 +142,23 @@ class RenameCommand(VisitorBasedCodemodCommand): ) -> cst.Import: new_names = [] for import_alias in updated_node.names: + # We keep the original import_alias here in case it's used by other symbols. + # It will be removed later in RemoveImportsVisitor if it's unused. + new_names.append(import_alias) import_alias_name = import_alias.name import_alias_full_name = get_full_name_for_node(import_alias_name) if import_alias_full_name is None: raise Exception("Could not parse full name for ImportAlias.name node.") - if isinstance(import_alias_name, cst.Name) and self.old_name.startswith( - import_alias_full_name + "." - ): - # Might, be in use elsewhere in the code, so schedule a potential removal, and add another alias. - new_names.append(import_alias) - replacement_module = self.gen_replacement_module(import_alias_full_name) - self.bypass_import = True - if replacement_module != import_alias_name.value: - self.scheduled_removals.add(original_node) - new_names.append( - cst.ImportAlias(name=cst.Name(value=replacement_module)) - ) - elif isinstance( - import_alias_name, cst.Attribute + if isinstance( + import_alias_name, (cst.Name, cst.Attribute) ) and self.old_name.startswith(import_alias_full_name + "."): - # Same idea as above. 
- new_names.append(import_alias) replacement_module = self.gen_replacement_module(import_alias_full_name) + if not replacement_module: + # here import_alias_full_name isn't an exact match for old_name + # don't add an import here, it will be handled either in more + # specific import aliases or at the very end + continue self.bypass_import = True if replacement_module != import_alias_full_name: self.scheduled_removals.add(original_node) @@ -172,8 +166,6 @@ class RenameCommand(VisitorBasedCodemodCommand): self.gen_name_or_attr_node(replacement_module) ) new_names.append(cst.ImportAlias(name=new_name_node)) - else: - new_names.append(import_alias) return updated_node.with_changes(names=new_names) @@ -289,10 +281,14 @@ class RenameCommand(VisitorBasedCodemodCommand): if not inside_import_statement: self.scheduled_removals.add(original_node.value) if full_replacement_name == self.new_name: - return updated_node.with_changes( - value=cst.parse_expression(new_value), - attr=cst.Name(value=new_attr.rstrip(".")), - ) + value = cst.parse_expression(new_value) + if new_attr: + return updated_node.with_changes( + value=value, + attr=cst.Name(value=new_attr.rstrip(".")), + ) + assert isinstance(value, (cst.Name, cst.Attribute)) + return value return self.gen_name_or_attr_node(new_attr) @@ -329,8 +325,12 @@ class RenameCommand(VisitorBasedCodemodCommand): if original_name == self.old_mod_or_obj: return self.new_mod_or_obj - elif original_name == ".".join([self.old_module, self.old_mod_or_obj]): - return self.new_name + elif original_name == self.old_name: + return ( + self.new_mod_or_obj + if (not self.bypass_import and self.new_mod_or_obj) + else self.new_name + ) elif original_name.endswith("." + self.old_mod_or_obj): return self.new_mod_or_obj else: diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 2f897119..efcfbc6e 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -705,3 +705,39 @@ class TestRenameCommand(CodemodTest): old_name="a.b.qux", new_name="a:b.qux", ) + + def test_import_parent_module(self) -> None: + before = """ + import a + a.b.c(a.b.c.d) + """ + after = """ + from z import c + + c(c.d) + """ + self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c") + + def test_import_parent_module_2(self) -> None: + before = """ + import a.b + a.b.c.d(a.b.c.d.x) + """ + after = """ + from z import c + + c(c.x) + """ + self.assertCodemod(before, after, old_name="a.b.c.d", new_name="z.c") + + def test_import_parent_module_3(self) -> None: + before = """ + import a + a.b.c(a.b.c.d) + """ + after = """ + import z.c + + z.c(z.c.d) + """ + self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c:") From a36432c958fc34bdf28e19f2962ac9f70d35d9f9 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 29 Nov 2024 11:23:59 +0000 Subject: [PATCH 527/632] rename: Fix imports with aliases (#1252) When renaming `a.b` -> `c.d`, in imports like `import a.b as x` the as_name wasn't correctly removed even though references to `x` were renamed to `c.d`. This PR makes the codemod remove the `x` asname in these cases. 
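For illustration, a minimal sketch of exercising the fix programmatically (it mirrors the `test_rename_attr_asname_2` case added below; `transform_module` and `CodemodContext` are LibCST's standard codemod entry points, and the `foo.qux`/`baz.quux` names are purely illustrative):

```
from libcst.codemod import CodemodContext, transform_module
from libcst.codemod.commands.rename import RenameCommand

source = """\
import foo.qux as bar

def test() -> None:
    bar.z(5)
"""

# Rename foo.qux -> baz.quux: references via the alias `bar` are rewritten to
# baz.quux, and with this fix the now-stale `import foo.qux as bar` is removed
# instead of lingering next to the new `import baz.quux`.
result = transform_module(
    RenameCommand(CodemodContext(), old_name="foo.qux", new_name="baz.quux"), source
)
print(result.code)  # assumes a TransformSuccess result
```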
--- libcst/codemod/commands/rename.py | 16 ++++++++-- libcst/codemod/commands/tests/test_rename.py | 32 ++++++++++++++++++++ 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index ae7138c8..aad4cea6 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -150,9 +150,7 @@ class RenameCommand(VisitorBasedCodemodCommand): if import_alias_full_name is None: raise Exception("Could not parse full name for ImportAlias.name node.") - if isinstance( - import_alias_name, (cst.Name, cst.Attribute) - ) and self.old_name.startswith(import_alias_full_name + "."): + if self.old_name.startswith(import_alias_full_name + "."): replacement_module = self.gen_replacement_module(import_alias_full_name) if not replacement_module: # here import_alias_full_name isn't an exact match for old_name @@ -166,6 +164,18 @@ class RenameCommand(VisitorBasedCodemodCommand): self.gen_name_or_attr_node(replacement_module) ) new_names.append(cst.ImportAlias(name=new_name_node)) + elif ( + import_alias_full_name == self.new_name + and import_alias.asname is not None + ): + self.bypass_import = True + # TODO: put this into self.scheduled_removals + RemoveImportsVisitor.remove_unused_import( + self.context, + import_alias.evaluated_name, + asname=import_alias.evaluated_alias, + ) + new_names.append(import_alias.with_changes(asname=None)) return updated_node.with_changes(names=new_names) diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index efcfbc6e..20e1c7d4 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -111,6 +111,27 @@ class TestRenameCommand(CodemodTest): new_name="baz.quux", ) + def test_rename_attr_asname_2(self) -> None: + before = """ + import foo.qux as bar + + def test() -> None: + bar.z(5) + """ + after = """ + import baz.quux + + def test() -> None: + baz.quux.z(5) + """ + + self.assertCodemod( + before, + after, + old_name="foo.qux", + new_name="baz.quux", + ) + def test_rename_module_import(self) -> None: before = """ import a.b @@ -741,3 +762,14 @@ class TestRenameCommand(CodemodTest): z.c(z.c.d) """ self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c:") + + def test_import_parent_module_asname(self) -> None: + before = """ + import a.b as alias + alias.c(alias.c.d) + """ + after = """ + import z + z.c(z.c.d) + """ + self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c") From c05ac74b9a0c6e2cc071e04bbc54686671aae776 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 2 Dec 2024 10:00:35 +0000 Subject: [PATCH 528/632] refactor: allow scheduled_removals to accept a tuple (#1253) This fixes a TODO --- libcst/codemod/commands/rename.py | 34 ++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index aad4cea6..8b6302c9 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -103,15 +103,20 @@ class RenameCommand(VisitorBasedCodemodCommand): self.context.scratch["as_name"] = value @property - def scheduled_removals(self) -> Set[cst.CSTNode]: + def scheduled_removals( + self, + ) -> Set[Union[cst.CSTNode, Tuple[str, Optional[str], Optional[str]]]]: """A set of nodes that have been renamed to help with the cleanup of now potentially unused - imports, during import cleanup in `leave_Module`.""" + imports, during 
import cleanup in `leave_Module`. Can also contain tuples that can be passed + directly to RemoveImportsVisitor.remove_unused_import().""" if "scheduled_removals" not in self.context.scratch: self.context.scratch["scheduled_removals"] = set() return self.context.scratch["scheduled_removals"] @scheduled_removals.setter - def scheduled_removals(self, value: Set[cst.CSTNode]) -> None: + def scheduled_removals( + self, value: Set[Union[cst.CSTNode, Tuple[str, Optional[str], Optional[str]]]] + ) -> None: self.context.scratch["scheduled_removals"] = value @property @@ -169,11 +174,13 @@ class RenameCommand(VisitorBasedCodemodCommand): and import_alias.asname is not None ): self.bypass_import = True - # TODO: put this into self.scheduled_removals - RemoveImportsVisitor.remove_unused_import( - self.context, - import_alias.evaluated_name, - asname=import_alias.evaluated_alias, + # Add removal tuple instead of calling directly + self.scheduled_removals.add( + ( + import_alias.evaluated_name, + None, + import_alias.evaluated_alias, + ) ) new_names.append(import_alias.with_changes(asname=None)) @@ -307,10 +314,13 @@ class RenameCommand(VisitorBasedCodemodCommand): def leave_Module( self, original_node: cst.Module, updated_node: cst.Module ) -> cst.Module: - for removal_node in self.scheduled_removals: - RemoveImportsVisitor.remove_unused_import_by_node( - self.context, removal_node - ) + for removal in self.scheduled_removals: + if isinstance(removal, tuple): + RemoveImportsVisitor.remove_unused_import( + self.context, removal[0], removal[1], removal[2] + ) + else: + RemoveImportsVisitor.remove_unused_import_by_node(self.context, removal) # If bypass_import is False, we know that no import statements were directly renamed, and the fact # that we have any `self.scheduled_removals` tells us we encountered a matching `old_name` in the code. if not self.bypass_import and self.scheduled_removals: From 8c30fcef305dc59d817f10fe9c48227a7b329d90 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 2 Dec 2024 10:00:59 +0000 Subject: [PATCH 529/632] rename: don't leave trailing commas (#1254) When renaming the last element of a `from a import b,c` import, don't leave the trailing comma after `b` --- libcst/codemod/commands/rename.py | 6 ++- libcst/codemod/commands/tests/test_rename.py | 45 ++++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 8b6302c9..9ad4334d 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -214,7 +214,7 @@ class RenameCommand(VisitorBasedCodemodCommand): return updated_node else: - new_names = [] + new_names: list[cst.ImportAlias] = [] for import_alias in names: alias_name = get_full_name_for_node(import_alias.name) if alias_name is not None: @@ -252,6 +252,10 @@ class RenameCommand(VisitorBasedCodemodCommand): # This import might be in use elsewhere in the code, so schedule a potential removal. 
self.scheduled_removals.add(original_node) new_names.append(import_alias) + if isinstance(new_names[-1].comma, cst.Comma): + new_names[-1] = new_names[-1].with_changes( + comma=cst.MaybeSentinel.DEFAULT + ) return updated_node.with_changes(names=new_names) return updated_node diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 20e1c7d4..8245a34c 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -382,6 +382,28 @@ class TestRenameCommand(CodemodTest): new_name="d.z", ) + def test_comma_import(self) -> None: + before = """ + import a, b, c + + class Foo(a.z): + bar: b.bar + baz: c.baz + """ + after = """ + import a, b, d + + class Foo(a.z): + bar: b.bar + baz: d.baz + """ + self.assertCodemod( + before, + after, + old_name="c.baz", + new_name="d.baz", + ) + def test_other_import_froms_untouched(self) -> None: before = """ from a import b, c, d @@ -405,6 +427,29 @@ class TestRenameCommand(CodemodTest): new_name="f.b", ) + def test_comma_import_from(self) -> None: + before = """ + from a import b, c, d + + class Foo(b): + bar: c.bar + baz: d.baz + """ + after = """ + from a import b, c + from f import d + + class Foo(b): + bar: c.bar + baz: d.baz + """ + self.assertCodemod( + before, + after, + old_name="a.d", + new_name="f.d", + ) + def test_no_removal_of_import_in_use(self) -> None: before = """ import a From d24192a40ff1dbfaab0dac34d42b1e12c08f763e Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 2 Dec 2024 16:13:12 +0000 Subject: [PATCH 530/632] rename: don't eat commas unnecessarily (#1256) #1254 was a bit too aggressive in removing commas. They shouldn't be removed if there are parenthesis around the imported names. --- libcst/codemod/commands/rename.py | 2 +- libcst/codemod/commands/tests/test_rename.py | 32 ++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 9ad4334d..f09c41fb 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -252,7 +252,7 @@ class RenameCommand(VisitorBasedCodemodCommand): # This import might be in use elsewhere in the code, so schedule a potential removal. 
self.scheduled_removals.add(original_node) new_names.append(import_alias) - if isinstance(new_names[-1].comma, cst.Comma): + if isinstance(new_names[-1].comma, cst.Comma) and updated_node.rpar is None: new_names[-1] = new_names[-1].with_changes( comma=cst.MaybeSentinel.DEFAULT ) diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 8245a34c..4b88ee29 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -450,6 +450,38 @@ class TestRenameCommand(CodemodTest): new_name="f.d", ) + def test_comma_import_from_parens(self) -> None: + before = """ + from a import ( + b, + c, + d, + ) + from x import (y,) + + class Foo(b): + bar: c.bar + baz: d.baz + """ + after = """ + from a import ( + b, + c, + ) + from x import (y,) + from f import d + + class Foo(b): + bar: c.bar + baz: d.baz + """ + self.assertCodemod( + before, + after, + old_name="a.d", + new_name="f.d", + ) + def test_no_removal_of_import_in_use(self) -> None: before = """ import a From b04670c1668fd933ec04b09b22bf482b76029da7 Mon Sep 17 00:00:00 2001 From: khameeteman <73279381+khameeteman@users.noreply.github.com> Date: Sat, 7 Dec 2024 22:33:56 +0100 Subject: [PATCH 531/632] bump 3.12 to 3.13 in readme (#1228) --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 90e2f278..3d0b13be 100644 --- a/README.rst +++ b/README.rst @@ -33,7 +33,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python .. intro-start -LibCST parses Python 3.0 -> 3.12 source code as a CST tree that keeps +LibCST parses Python 3.0 -> 3.13 source code as a CST tree that keeps all formatting details (comments, whitespaces, parentheses, etc). It's useful for building automated refactoring (codemod) applications and linters. From a3b5529bb3fd0bc1d10dd5f1c451625d384075ff Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 11 Dec 2024 20:30:33 +0000 Subject: [PATCH 532/632] rename: fix renaming toplevel names (#1260) For toplevel module names imported via `import foo`, the rename codemod would fail to change these. This PR fixes that. 
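As a quick illustration, here is a minimal sketch based on the `test_push_down_toplevel_names` case added below (using LibCST's `transform_module` helper; `foo` and `quux.foo` are illustrative names):

```
from libcst.codemod import CodemodContext, transform_module
from libcst.codemod.commands.rename import RenameCommand

# Previously, renaming the toplevel module `foo` to `quux.foo` left both
# `import foo` and the `foo.baz()` reference untouched; with this fix both
# are rewritten (per the new test: `import quux.foo` / `quux.foo.baz()`).
result = transform_module(
    RenameCommand(CodemodContext(), old_name="foo", new_name="quux.foo"),
    "import foo\nfoo.baz()\n",
)
print(result.code)  # assumes a TransformSuccess result
```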
--- libcst/codemod/commands/rename.py | 2 +- libcst/codemod/commands/tests/test_rename.py | 22 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index f09c41fb..ee196582 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -347,7 +347,7 @@ class RenameCommand(VisitorBasedCodemodCommand): module_as_name[0] + ".", module_as_name[1] + ".", 1 ) - if original_name == self.old_mod_or_obj: + if self.old_module and original_name == self.old_mod_or_obj: return self.new_mod_or_obj elif original_name == self.old_name: return ( diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 4b88ee29..6e8ef936 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -850,3 +850,25 @@ class TestRenameCommand(CodemodTest): z.c(z.c.d) """ self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c") + + def test_push_down_toplevel_names(self) -> None: + before = """ + import foo + foo.baz() + """ + after = """ + import quux.foo + quux.foo.baz() + """ + self.assertCodemod(before, after, old_name="foo", new_name="quux.foo") + + def test_push_down_toplevel_names_with_asname(self) -> None: + before = """ + import foo as bar + bar.baz() + """ + after = """ + import quux.foo + quux.foo.baz() + """ + self.assertCodemod(before, after, old_name="foo", new_name="quux.foo") From 3e4bae471b8fbad42b1102dd14db36a4d8e1e23f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 11:41:48 +0000 Subject: [PATCH 533/632] Bump ufmt from 2.7.3 to 2.8.0 (#1236) Bumps [ufmt](https://github.com/omnilib/ufmt) from 2.7.3 to 2.8.0. - [Changelog](https://github.com/omnilib/ufmt/blob/main/CHANGELOG.md) - [Commits](https://github.com/omnilib/ufmt/compare/v2.7.3...v2.8.0) --- updated-dependencies: - dependency-name: ufmt dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 79ce0ba1..fc8f851b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ dev = [ "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", "sphinx-rtd-theme>=0.4.3", - "ufmt==2.7.3", + "ufmt==2.8.0", "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", From 230f177c84f6711f17e00851ccfa6a41e63e3caf Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 16 Dec 2024 10:01:02 +0000 Subject: [PATCH 534/632] ci: audit workflows with zizmor (#1262) https://woodruffw.github.io/zizmor/ --- .github/workflows/build.yml | 1 + .github/workflows/ci.yml | 8 +++++++ .github/workflows/pypi_upload.yml | 1 + .github/workflows/zizmor.yml | 35 +++++++++++++++++++++++++++++++ 4 files changed, 45 insertions(+) create mode 100644 .github/workflows/zizmor.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 886947d0..b14b4210 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -27,6 +27,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - uses: actions/setup-python@v5 with: cache: pip diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e8ff9207..ab290f54 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,6 +18,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - uses: actions/setup-python@v5 with: cache: pip @@ -50,6 +51,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - uses: actions/setup-python@v5 with: cache: pip @@ -67,6 +69,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - uses: actions/setup-python@v5 with: cache: pip @@ -83,6 +86,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - uses: actions/setup-python@v5 with: cache: pip @@ -108,6 +112,8 @@ jobs: os: [ubuntu-latest, macos-latest, windows-latest] steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy @@ -141,6 +147,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: dtolnay/rust-toolchain@stable with: components: rustfmt diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index ee9990cb..f03151f7 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -22,6 +22,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - name: Download binary wheels id: download uses: actions/download-artifact@v4 diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml new file mode 100644 index 00000000..dad6232f --- /dev/null +++ b/.github/workflows/zizmor.yml @@ -0,0 +1,35 @@ +name: GitHub Actions Security Analysis with zizmor 🌈 + +on: + push: + branches: ["main"] + pull_request: + branches: ["**"] + +jobs: + zizmor: + name: zizmor latest via PyPI + runs-on: ubuntu-latest + permissions: + security-events: write + contents: read + actions: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v4 + + - name: Run 
zizmor 🌈 + run: uvx zizmor --format sarif . > results.sarif + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: results.sarif + category: zizmor \ No newline at end of file From d26987202b10534495477a93fad603627ecf28a8 Mon Sep 17 00:00:00 2001 From: Danny Yang Date: Thu, 2 Jan 2025 19:48:55 -0500 Subject: [PATCH 535/632] Add codemod to convert `typing.Union` to `|` (#1270) * add union to or codemod * lint * early return --- .../codemod/commands/convert_union_to_or.py | 56 ++++++++++++ .../tests/test_convert_union_to_or.py | 86 +++++++++++++++++++ 2 files changed, 142 insertions(+) create mode 100644 libcst/codemod/commands/convert_union_to_or.py create mode 100644 libcst/codemod/commands/tests/test_convert_union_to_or.py diff --git a/libcst/codemod/commands/convert_union_to_or.py b/libcst/codemod/commands/convert_union_to_or.py new file mode 100644 index 00000000..96a64314 --- /dev/null +++ b/libcst/codemod/commands/convert_union_to_or.py @@ -0,0 +1,56 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict + +import libcst as cst +from libcst.codemod import VisitorBasedCodemodCommand +from libcst.codemod.visitors import RemoveImportsVisitor +from libcst.metadata import QualifiedName, QualifiedNameProvider, QualifiedNameSource + + +class ConvertUnionToOrCommand(VisitorBasedCodemodCommand): + DESCRIPTION: str = "Convert `Union[A, B]` to `A | B` in Python 3.10+" + + METADATA_DEPENDENCIES = (QualifiedNameProvider,) + + def leave_Subscript( + self, original_node: cst.Subscript, updated_node: cst.Subscript + ) -> cst.BaseExpression: + """ + Given a subscript, check if it's a Union - if so, either flatten the members + into a nested BitOr (if multiple members) or unwrap the type (if only one member). + """ + if not QualifiedNameProvider.has_name( + self, + original_node, + QualifiedName(name="typing.Union", source=QualifiedNameSource.IMPORT), + ): + return updated_node + types = [ + cst.ensure_type( + cst.ensure_type(s, cst.SubscriptElement).slice, cst.Index + ).value + for s in updated_node.slice + ] + if len(types) == 1: + return types[0] + else: + replacement = cst.BinaryOperation( + left=types[0], right=types[1], operator=cst.BitOr() + ) + for type_ in types[2:]: + replacement = cst.BinaryOperation( + left=replacement, right=type_, operator=cst.BitOr() + ) + return replacement + + def leave_Module( + self, original_node: cst.Module, updated_node: cst.Module + ) -> cst.Module: + RemoveImportsVisitor.remove_unused_import( + self.context, module="typing", obj="Union" + ) + return updated_node diff --git a/libcst/codemod/commands/tests/test_convert_union_to_or.py b/libcst/codemod/commands/tests/test_convert_union_to_or.py new file mode 100644 index 00000000..5ba557d2 --- /dev/null +++ b/libcst/codemod/commands/tests/test_convert_union_to_or.py @@ -0,0 +1,86 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# +# pyre-strict + +from libcst.codemod import CodemodTest +from libcst.codemod.commands.convert_union_to_or import ConvertUnionToOrCommand + + +class TestConvertUnionToOrCommand(CodemodTest): + TRANSFORM = ConvertUnionToOrCommand + + def test_simple_union(self) -> None: + before = """ + from typing import Union + x: Union[int, str] + """ + after = """ + x: int | str + """ + self.assertCodemod(before, after) + + def test_nested_union(self) -> None: + before = """ + from typing import Union + x: Union[int, Union[str, float]] + """ + after = """ + x: int | str | float + """ + self.assertCodemod(before, after) + + def test_single_type_union(self) -> None: + before = """ + from typing import Union + x: Union[int] + """ + after = """ + x: int + """ + self.assertCodemod(before, after) + + def test_union_with_alias(self) -> None: + before = """ + import typing as t + x: t.Union[int, str] + """ + after = """ + import typing as t + x: int | str + """ + self.assertCodemod(before, after) + + def test_union_with_unused_import(self) -> None: + before = """ + from typing import Union, List + x: Union[int, str] + """ + after = """ + from typing import List + x: int | str + """ + self.assertCodemod(before, after) + + def test_union_no_import(self) -> None: + before = """ + x: Union[int, str] + """ + after = """ + x: Union[int, str] + """ + self.assertCodemod(before, after) + + def test_union_in_function(self) -> None: + before = """ + from typing import Union + def foo(x: Union[int, str]) -> Union[float, None]: + ... + """ + after = """ + def foo(x: int | str) -> float | None: + ... + """ + self.assertCodemod(before, after) From 776452f35149e2b05a2e5c19d1b417001b3921e0 Mon Sep 17 00:00:00 2001 From: Danny Yang Date: Thu, 2 Jan 2025 19:49:03 -0500 Subject: [PATCH 536/632] Add codemod to fix variadic callable annotations (#1269) * add fix variadic callable codemod * format --- .../codemod/commands/fix_variadic_callable.py | 40 ++++++++ .../tests/test_fix_variadic_callable.py | 92 +++++++++++++++++++ 2 files changed, 132 insertions(+) create mode 100644 libcst/codemod/commands/fix_variadic_callable.py create mode 100644 libcst/codemod/commands/tests/test_fix_variadic_callable.py diff --git a/libcst/codemod/commands/fix_variadic_callable.py b/libcst/codemod/commands/fix_variadic_callable.py new file mode 100644 index 00000000..85cb0aa0 --- /dev/null +++ b/libcst/codemod/commands/fix_variadic_callable.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# +# pyre-strict + +import libcst as cst +import libcst.matchers as m +from libcst.codemod import VisitorBasedCodemodCommand +from libcst.metadata import QualifiedName, QualifiedNameProvider, QualifiedNameSource + + +class FixVariadicCallableCommmand(VisitorBasedCodemodCommand): + DESCRIPTION: str = ( + "Fix incorrect variadic callable type annotations from `Callable[[...], T]` to `Callable[..., T]``" + ) + + METADATA_DEPENDENCIES = (QualifiedNameProvider,) + + def leave_Subscript( + self, original_node: cst.Subscript, updated_node: cst.Subscript + ) -> cst.BaseExpression: + if QualifiedNameProvider.has_name( + self, + original_node, + QualifiedName(name="typing.Callable", source=QualifiedNameSource.IMPORT), + ): + node_matches = len(updated_node.slice) == 2 and m.matches( + updated_node.slice[0], + m.SubscriptElement( + slice=m.Index(value=m.List(elements=[m.Element(m.Ellipsis())])) + ), + ) + + if node_matches: + slices = list(updated_node.slice) + slices[0] = cst.SubscriptElement(cst.Index(cst.Ellipsis())) + return updated_node.with_changes(slice=slices) + return updated_node diff --git a/libcst/codemod/commands/tests/test_fix_variadic_callable.py b/libcst/codemod/commands/tests/test_fix_variadic_callable.py new file mode 100644 index 00000000..848f0c98 --- /dev/null +++ b/libcst/codemod/commands/tests/test_fix_variadic_callable.py @@ -0,0 +1,92 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict + +from libcst.codemod import CodemodTest +from libcst.codemod.commands.fix_variadic_callable import FixVariadicCallableCommmand + + +class TestFixVariadicCallableCommmand(CodemodTest): + TRANSFORM = FixVariadicCallableCommmand + + def test_callable_typing(self) -> None: + before = """ + from typing import Callable + x: Callable[[...], int] = ... + """ + after = """ + from typing import Callable + x: Callable[..., int] = ... + """ + self.assertCodemod(before, after) + + def test_callable_typing_alias(self) -> None: + before = """ + import typing as t + x: t.Callable[[...], int] = ... + """ + after = """ + import typing as t + x: t.Callable[..., int] = ... + """ + self.assertCodemod(before, after) + + def test_callable_import_alias(self) -> None: + before = """ + from typing import Callable as C + x: C[[...], int] = ... + """ + after = """ + from typing import Callable as C + x: C[..., int] = ... + """ + self.assertCodemod(before, after) + + def test_callable_with_optional(self) -> None: + before = """ + from typing import Callable + def foo(bar: Optional[Callable[[...], int]]) -> Callable[[...], int]: + ... + """ + after = """ + from typing import Callable + def foo(bar: Optional[Callable[..., int]]) -> Callable[..., int]: + ... 
+ """ + self.assertCodemod(before, after) + + def test_callable_with_arguments(self) -> None: + before = """ + from typing import Callable + x: Callable[[int], int] + """ + after = """ + from typing import Callable + x: Callable[[int], int] + """ + self.assertCodemod(before, after) + + def test_callable_with_variadic_arguments(self) -> None: + before = """ + from typing import Callable + x: Callable[[int, int, ...], int] + """ + after = """ + from typing import Callable + x: Callable[[int, int, ...], int] + """ + self.assertCodemod(before, after) + + def test_callable_no_arguments(self) -> None: + before = """ + from typing import Callable + x: Callable + """ + after = """ + from typing import Callable + x: Callable + """ + self.assertCodemod(before, after) From c4e7934253a64b0e4ed8a686a74465c2d444090e Mon Sep 17 00:00:00 2001 From: Danny Yang Date: Sat, 4 Jan 2025 04:59:40 -0500 Subject: [PATCH 537/632] add types classifier and badge (#1272) --- README.rst | 6 +++++- pyproject.toml | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 3d0b13be..78d29820 100644 --- a/README.rst +++ b/README.rst @@ -4,7 +4,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python -|support-ukraine| |readthedocs-badge| |ci-badge| |pypi-badge| |pypi-download| |notebook-badge| +|support-ukraine| |readthedocs-badge| |ci-badge| |pypi-badge| |pypi-download| |notebook-badge| |types-badge| .. |support-ukraine| image:: https://img.shields.io/badge/Support-Ukraine-FFD500?style=flat&labelColor=005BBB :alt: Support Ukraine - Help Provide Humanitarian Aid to Ukraine. @@ -31,6 +31,10 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python :target: https://mybinder.org/v2/gh/Instagram/LibCST/main?filepath=docs%2Fsource%2Ftutorial.ipynb :alt: Notebook +.. |types-badge| image:: https://img.shields.io/pypi/types/libcst + :target: https://pypi.org/project/libcst + :alt: PYPI - Types + .. 
intro-start LibCST parses Python 3.0 -> 3.13 source code as a CST tree that keeps diff --git a/pyproject.toml b/pyproject.toml index fc8f851b..caf5b684 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Typing :: Typed", ] requires-python = ">=3.9" dependencies = ["pyyaml>=5.2"] From 595d7f6aaf6dbd710346b75f4f7f24d18147b827 Mon Sep 17 00:00:00 2001 From: Crozzers Date: Wed, 8 Jan 2025 19:58:37 +0000 Subject: [PATCH 538/632] Expose TypeAlias and TypeVar related structs in rust library (#1274) --- native/libcst/src/nodes/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index 9fbdb1af..d2bc0220 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -18,7 +18,8 @@ pub use statement::{ MatchPattern, MatchSequence, MatchSequenceElement, MatchSingleton, MatchStar, MatchTuple, MatchValue, NameItem, Nonlocal, OrElse, Pass, Raise, Return, SimpleStatementLine, SimpleStatementSuite, SmallStatement, StarrableMatchSequenceElement, Statement, Suite, Try, - TryStar, While, With, WithItem, + TryStar, TypeAlias, TypeParam, TypeParameters, TypeVar, TypeVarLike, TypeVarTuple, While, + With, WithItem, }; pub(crate) mod expression; From b523b360c1c5e3825faaf92f1396d35118277615 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 8 Jan 2025 20:02:17 +0000 Subject: [PATCH 539/632] run cargo fmt Summary: Test Plan: --- native/libcst/src/nodes/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index d2bc0220..ce02c86f 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -18,8 +18,8 @@ pub use statement::{ MatchPattern, MatchSequence, MatchSequenceElement, MatchSingleton, MatchStar, MatchTuple, MatchValue, NameItem, Nonlocal, OrElse, Pass, Raise, Return, SimpleStatementLine, SimpleStatementSuite, SmallStatement, StarrableMatchSequenceElement, Statement, Suite, Try, - TryStar, TypeAlias, TypeParam, TypeParameters, TypeVar, TypeVarLike, TypeVarTuple, While, - With, WithItem, + TryStar, TypeAlias, TypeParam, TypeParameters, TypeVar, TypeVarLike, TypeVarTuple, While, With, + WithItem, }; pub(crate) mod expression; From 20837f78243e8e8e3bf5b9c8c4c7ba28a47e6391 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 9 Jan 2025 18:39:18 +0000 Subject: [PATCH 540/632] ci: disable macos intel wheels (#1275) cibuildwheel fails to build these after a recent version upgrade --- .github/workflows/build.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b14b4210..a7c34bba 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,8 +10,7 @@ jobs: strategy: fail-fast: false matrix: - # macos-13 is an intel runner, macos-14 is apple silicon - os: [macos-13, macos-14, ubuntu-latest, windows-latest] + os: [macos-latest, ubuntu-latest, windows-latest] env: SCCACHE_VERSION: 0.2.13 CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" From d2382d81acb7964800fa9162da6cacb73a5c4581 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Thu, 9 Jan 2025 18:47:12 +0000 Subject: [PATCH 541/632] Upgrade pyo3 to 0.22 (#1180) * Upgrade pyo3 
to 0.22 * libcst_native: add optional signature Newer versions of pyo3 warn about missing signatures --- native/Cargo.lock | 93 +++++------------------- native/libcst/Cargo.toml | 2 +- native/libcst/src/nodes/expression.rs | 13 ++-- native/libcst/src/nodes/parser_config.rs | 2 +- native/libcst/src/nodes/traits.rs | 2 +- native/libcst/src/parser/errors.rs | 13 ++-- native/libcst/src/py.rs | 3 +- native/libcst_derive/src/into_py.rs | 15 ++-- 8 files changed, 46 insertions(+), 97 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 00409e83..a1b519ec 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -35,12 +35,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.4.0" @@ -265,9 +259,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "heck" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" @@ -291,15 +285,6 @@ version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - [[package]] name = "is-terminal" version = "0.4.9" @@ -389,16 +374,6 @@ version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" -[[package]] -name = "lock_api" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" -dependencies = [ - "autocfg", - "scopeguard", -] - [[package]] name = "log" version = "0.4.17" @@ -453,31 +428,6 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall", - "smallvec", - "winapi", -] - [[package]] name = "paste" version = "1.0.15" @@ -550,15 +500,16 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.20.2" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0" +checksum = "831e8e819a138c36e212f3af3fd9eeffed6bf1510a805af35b0edee5ffa59433" 
dependencies = [ "cfg-if", "indoc", "libc", "memoffset 0.9.0", - "parking_lot", + "once_cell", + "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", @@ -567,9 +518,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.20.2" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be" +checksum = "1e8730e591b14492a8945cdff32f089250b05f5accecf74aeddf9e8272ce1fa8" dependencies = [ "once_cell", "target-lexicon", @@ -577,9 +528,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.20.2" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1" +checksum = "5e97e919d2df92eb88ca80a037969f44e5e70356559654962cbb3316d00300c6" dependencies = [ "libc", "pyo3-build-config", @@ -587,9 +538,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.20.2" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3" +checksum = "eb57983022ad41f9e683a599f2fd13c3664d7063a3ac5714cae4b7bee7d3f206" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -599,12 +550,13 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.20.2" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f" +checksum = "ec480c0c51ddec81019531705acac51bcdbeae563557c982aa8263bb96880372" dependencies = [ "heck", "proc-macro2", + "pyo3-build-config", "quote", "syn 2.0.75", ] @@ -638,15 +590,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "redox_syscall" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "regex" version = "1.10.6" @@ -682,7 +625,7 @@ version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ - "bitflags 2.4.0", + "bitflags", "errno", "libc", "linux-raw-sys", @@ -781,9 +724,9 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.12.4" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1" +checksum = "4873307b7c257eddcb50c9bedf158eb669578359fb28428bef438fec8e6ba7c2" [[package]] name = "termcolor" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 7cdcc61e..6e77feca 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" -pyo3 = { version = "0.20", optional = true } +pyo3 = { version = "0.22", optional = true } thiserror = "1.0.63" peg = "0.8.4" chic = "1.2.2" diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 971ef8bc..bc3699bf 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -2524,6 +2524,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedNamedExpr<'r, 'a> { #[cfg(feature = "py")] mod py { + use pyo3::types::PyAnyMethods; use pyo3::types::PyModule; use super::*; @@ -2535,7 +2536,7 @@ mod py { match self { 
Self::Starred(s) => s.try_into_py(py), Self::Simple { value, comma } => { - let libcst = PyModule::import(py, "libcst")?; + let libcst = PyModule::import_bound(py, "libcst")?; let kwargs = [ Some(("value", value.try_into_py(py)?)), comma @@ -2547,11 +2548,11 @@ mod py { .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict(py); + .into_py_dict_bound(py); Ok(libcst .getattr("Element") .expect("no Element found in libcst") - .call((), Some(kwargs))? + .call((), Some(&kwargs))? .into()) } } @@ -2571,7 +2572,7 @@ mod py { whitespace_before_colon, .. } => { - let libcst = PyModule::import(py, "libcst")?; + let libcst = PyModule::import_bound(py, "libcst")?; let kwargs = [ Some(("key", key.try_into_py(py)?)), Some(("value", value.try_into_py(py)?)), @@ -2592,11 +2593,11 @@ mod py { .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict(py); + .into_py_dict_bound(py); Ok(libcst .getattr("DictElement") .expect("no Element found in libcst") - .call((), Some(kwargs))? + .call((), Some(&kwargs))? .into()) } } diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs index 6dfdf544..4a80a048 100644 --- a/native/libcst/src/nodes/parser_config.rs +++ b/native/libcst/src/nodes/parser_config.rs @@ -125,7 +125,7 @@ fn parser_config_asdict<'py>(py: Python<'py>, config: PyRef<'py, ParserConfig>) ("version", config.version.clone_ref(py)), ("future_imports", config.future_imports.clone_ref(py)), ] - .into_py_dict(py) + .into_py_dict_bound(py) } pub fn init_module(_py: Python, m: &PyModule) -> PyResult<()> { diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index 1dd58132..397f6404 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -170,7 +170,7 @@ pub mod py { .map(|x| x.try_into_py(py)) .collect::>>()? 
.into_iter(); - Ok(PyTuple::new(py, converted).into()) + Ok(PyTuple::new_bound(py, converted).into()) } } diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 79132ce1..85690c97 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -28,7 +28,7 @@ pub enum ParserError<'a> { #[cfg(feature = "py")] mod py_error { - use pyo3::types::{IntoPyDict, PyModule}; + use pyo3::types::{IntoPyDict, PyAnyMethods, PyModule}; use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; use super::ParserError; @@ -65,13 +65,14 @@ mod py_error { ("raw_line", (line + 1).into_py(py)), ("raw_column", col.into_py(py)), ] - .into_py_dict(py); - let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); - PyErr::from_value( + .into_py_dict_bound(py); + let libcst = + PyModule::import_bound(py, "libcst").expect("libcst cannot be imported"); + PyErr::from_value_bound( libcst .getattr("ParserSyntaxError") .expect("ParserSyntaxError not found") - .call((), Some(kwargs)) + .call((), Some(&kwargs)) .expect("failed to instantiate"), ) }) @@ -86,7 +87,7 @@ mod py_error { ("raw_line", self.raw_line.into_py(py)), ("raw_column", self.raw_column.into_py(py)), ] - .into_py_dict(py) + .into_py_dict_bound(py) .into_py(py) } } diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs index 960fac3a..bd7dfe6d 100644 --- a/native/libcst/src/py.rs +++ b/native/libcst/src/py.rs @@ -8,8 +8,9 @@ use pyo3::prelude::*; #[pymodule] #[pyo3(name = "native")] -pub fn libcst_native(_py: Python, m: &PyModule) -> PyResult<()> { +pub fn libcst_native(_py: Python, m: &Bound) -> PyResult<()> { #[pyfn(m)] + #[pyo3(signature = (source, encoding=None))] fn parse_module(source: String, encoding: Option<&str>) -> PyResult { let m = crate::parse_module(source.as_str(), encoding)?; Python::with_gil(|py| m.try_into_py(py)) diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index 3894212f..ac656cb4 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -38,12 +38,14 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { let kwargs_toks = fields_to_kwargs(&var.fields, true); toks.push(quote! { Self::#varname { #(#fieldnames,)* .. } => { - let libcst = pyo3::types::PyModule::import(py, "libcst")?; + use pyo3::types::PyAnyMethods; + + let libcst = pyo3::types::PyModule::import_bound(py, "libcst")?; let kwargs = #kwargs_toks ; Ok(libcst .getattr(stringify!(#varname)) .expect(stringify!(no #varname found in libcst)) - .call((), Some(kwargs))? + .call((), Some(&kwargs))? .into()) } }) @@ -87,12 +89,13 @@ fn impl_into_py_struct(ast: &DeriveInput, e: &DataStruct) -> TokenStream { #[automatically_derived] impl#generics crate::nodes::traits::py::TryIntoPy for #ident #generics { fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { - let libcst = pyo3::types::PyModule::import(py, "libcst")?; + use pyo3::types::PyAnyMethods; + let libcst = pyo3::types::PyModule::import_bound(py, "libcst")?; let kwargs = #kwargs_toks ; Ok(libcst .getattr(stringify!(#ident)) .expect(stringify!(no #ident found in libcst)) - .call((), Some(kwargs))? + .call((), Some(&kwargs))? .into()) } } @@ -162,7 +165,7 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt #(#optional_rust_varnames.map(|x| x.try_into_py(py)).transpose()?.map(|x| (stringify!(#optional_py_varnames), x)),)* }; if empty_kwargs { - quote! { pyo3::types::PyDict::new(py) } + quote! 
{ pyo3::types::PyDict::new_bound(py) } } else { quote! { [ #kwargs_pairs #optional_pairs ] @@ -170,7 +173,7 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict(py) + .into_py_dict_bound(py) } } } From 403782d5e99df26512714c31f4cb17540d116aab Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 9 Jan 2025 18:50:02 +0000 Subject: [PATCH 542/632] Cargo.lock changes --- native/Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index a1b519ec..76a33536 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -489,6 +489,12 @@ dependencies = [ "plotters-backend", ] +[[package]] +name = "portable-atomic" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31114a898e107c51bb1609ffaf55a0e011cf6a4d7f1170d0015a165082c0338b" + [[package]] name = "proc-macro2" version = "1.0.86" @@ -694,12 +700,6 @@ dependencies = [ "serde", ] -[[package]] -name = "smallvec" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2" - [[package]] name = "syn" version = "1.0.109" From 6b483c61132297e931696d0542c077d3c0dc794f Mon Sep 17 00:00:00 2001 From: Danny Yang Date: Thu, 9 Jan 2025 13:59:00 -0500 Subject: [PATCH 543/632] Add codemod to rename typing aliases of builtins (#1267) * add codemod to rename typing aliases of builtins * format --- libcst/codemod/commands/rename.py | 2 +- .../commands/rename_typing_generic_aliases.py | 37 +++++++++++++++++++ libcst/codemod/commands/tests/test_rename.py | 13 +++++++ .../test_rename_typing_generic_aliases.py | 33 +++++++++++++++++ 4 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 libcst/codemod/commands/rename_typing_generic_aliases.py create mode 100644 libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index ee196582..361e33e6 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -328,7 +328,7 @@ class RenameCommand(VisitorBasedCodemodCommand): # If bypass_import is False, we know that no import statements were directly renamed, and the fact # that we have any `self.scheduled_removals` tells us we encountered a matching `old_name` in the code. if not self.bypass_import and self.scheduled_removals: - if self.new_module: + if self.new_module and self.new_module != "builtins": new_obj: Optional[str] = ( self.new_mod_or_obj.split(".")[0] if self.new_mod_or_obj else None ) diff --git a/libcst/codemod/commands/rename_typing_generic_aliases.py b/libcst/codemod/commands/rename_typing_generic_aliases.py new file mode 100644 index 00000000..d6906fe9 --- /dev/null +++ b/libcst/codemod/commands/rename_typing_generic_aliases.py @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# +# pyre-strict +from functools import partial +from typing import cast, Generator + +from libcst.codemod import Codemod, MagicArgsCodemodCommand +from libcst.codemod.commands.rename import RenameCommand + + +class RenameTypingGenericAliases(MagicArgsCodemodCommand): + DESCRIPTION: str = ( + "Rename typing module aliases of builtin generics in Python 3.9+, for example: `typing.List` -> `list`" + ) + + MAPPING: dict[str, str] = { + "typing.List": "builtins.list", + "typing.Tuple": "builtins.tuple", + "typing.Dict": "builtins.dict", + "typing.FrozenSet": "builtins.frozenset", + "typing.Set": "builtins.set", + "typing.Type": "builtins.type", + } + + def get_transforms(self) -> Generator[type[Codemod], None, None]: + for from_type, to_type in self.MAPPING.items(): + yield cast( + type[Codemod], + partial( + RenameCommand, + old_name=from_type, + new_name=to_type, + ), + ) diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 6e8ef936..5b6e0128 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -28,6 +28,19 @@ class TestRenameCommand(CodemodTest): self.assertCodemod(before, after, old_name="foo.bar", new_name="baz.qux") + def test_rename_to_builtin(self) -> None: + before = """ + from typing import List + x: List[int] = [] + """ + after = """ + x: list[int] = [] + """ + + self.assertCodemod( + before, after, old_name="typing.List", new_name="builtins.list" + ) + def test_rename_name_asname(self) -> None: before = """ from foo import bar as bla diff --git a/libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py b/libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py new file mode 100644 index 00000000..7a0a83c3 --- /dev/null +++ b/libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py @@ -0,0 +1,33 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +# pyre-strict + +from libcst.codemod import CodemodTest +from libcst.codemod.commands.rename_typing_generic_aliases import ( + RenameTypingGenericAliases, +) + + +class TestRenameCommand(CodemodTest): + TRANSFORM = RenameTypingGenericAliases + + def test_rename_typing_generic_alias(self) -> None: + before = """ + from typing import List, Set, Dict, FrozenSet, Tuple + x: List[int] = [] + y: Set[int] = set() + z: Dict[str, int] = {} + a: FrozenSet[str] = frozenset() + b: Tuple[int, str] = (1, "hello") + """ + after = """ + x: list[int] = [] + y: set[int] = set() + z: dict[str, int] = {} + a: frozenset[str] = frozenset() + b: tuple[int, str] = (1, "hello") + """ + self.assertCodemod(before, after) From af136b91acc59745ff875f086e58e8712b97f550 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 18:59:38 +0000 Subject: [PATCH 544/632] Bump astral-sh/setup-uv from 4 to 5 (#1264) Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 4 to 5. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v4...v5) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/zizmor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index dad6232f..797b8d9b 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -21,7 +21,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v5 - name: Run zizmor 🌈 run: uvx zizmor --format sarif . > results.sarif From 01c29394453d747db049617a8929b98ec6fd1655 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 19:00:34 +0000 Subject: [PATCH 545/632] Bump jinja2 from 3.1.4 to 3.1.5 (#1265) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.5. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.4...3.1.5) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index caf5b684..ea3953a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ dev = [ "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", - "jinja2==3.1.4", + "jinja2==3.1.5", ] [project.urls] From c825afb87d96a426fcaafde5be2478531e6bbc26 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 9 Jan 2025 19:09:48 +0000 Subject: [PATCH 546/632] Bump to 1.6.0 Summary: Test Plan: --- CHANGELOG.md | 30 ++++++++++++++++++++++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 35 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e9d5aab..c2a559b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,33 @@ +# 1.6.0 - 2024-01-09 + +## Fixed + +* rename: store state in scratch by @zsol in https://github.com/Instagram/LibCST/pull/1250 +* rename: handle imports via a parent module by @zsol in https://github.com/Instagram/LibCST/pull/1251 +* rename: Fix imports with aliases by @zsol in https://github.com/Instagram/LibCST/pull/1252 +* rename: don't leave trailing commas by @zsol in https://github.com/Instagram/LibCST/pull/1254 +* rename: don't eat commas unnecessarily by @zsol in https://github.com/Instagram/LibCST/pull/1256 +* rename: fix renaming toplevel names by @zsol in https://github.com/Instagram/LibCST/pull/1260 +* bump 3.12 to 3.13 in readme by @khameeteman in https://github.com/Instagram/LibCST/pull/1228 + +## Added + +* Add codemod to convert `typing.Union` to `|` by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1270 +* Add codemod to fix variadic callable annotations by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1269 +* Add codemod to rename typing aliases of builtins by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1267 +* Add typing classifier to pyproject.toml and badge to README by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1272 +* Expose TypeAlias and TypeVar related structs in rust library by @Crozzers in 
https://github.com/Instagram/LibCST/pull/1274 + +## Updated +* Upgrade pyo3 to 0.22 by @jelmer in https://github.com/Instagram/LibCST/pull/1180 + +## New Contributors +* @yangdanny97 made their first contribution in https://github.com/Instagram/LibCST/pull/1270 +* @Crozzers made their first contribution in https://github.com/Instagram/LibCST/pull/1274 +* @jelmer made their first contribution in https://github.com/Instagram/LibCST/pull/1180 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.5.1...v1.6.0 + # 1.5.1 - 2024-11-18 ## Added diff --git a/native/Cargo.lock b/native/Cargo.lock index 76a33536..2d37e231 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -343,7 +343,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.5.1" +version = "1.6.0" dependencies = [ "chic", "criterion", @@ -361,7 +361,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.5.1" +version = "1.6.0" dependencies = [ "quote", "syn 2.0.75", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 6e77feca..b8abcd1f 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.5.1" +version = "1.6.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.4" chic = "1.2.2" regex = "1.10.6" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.4.0" } +libcst_derive = { path = "../libcst_derive", version = "1.6.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 9ede217d..b645d93b 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.5.1" +version = "1.6.0" edition = "2018" description = "Proc macro helpers for libcst." 
license = "MIT" From 985cec808e6bf2ed859e3f7cdc2da7710ba32776 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 21 Feb 2025 16:20:49 -0600 Subject: [PATCH 547/632] Remove dependency on `chic` and upgrade `annotate-snippets` (#1293) * Vendor `chic` At https://github.com/yoshuawuyts/chic/commit/0761036492338f6e77a414af572cb8306f46b43d * Remove unused `Error::help` method * Upgrade to `annotate_snippets` 0.9.x Applying https://github.com/yoshuawuyts/chic/commit/27c99b5038c0a028ec9614d4e32dde66faa5ce36 * Upgrade to `annotate_snippets` 0.10.x See https://salsa.debian.org/rust-team/debcargo-conf/-/blob/master/src/chic/debian/patches/annotate-snippets-0.10 * Upgrade to `annotate_snippets` 0.11.x As in https://salsa.debian.org/rust-team/debcargo-conf/-/blob/master/src/chic/debian/patches/annotate-snippets-0.11 * Drop `chic` compatibility layer --- native/Cargo.lock | 29 ++++++++++++++------------- native/libcst/Cargo.toml | 2 +- native/libcst/src/lib.rs | 43 ++++++++++++++++++++++------------------ 3 files changed, 40 insertions(+), 34 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 2d37e231..c241e857 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -19,15 +19,19 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "annotate-snippets" -version = "0.6.1" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7" +checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" +dependencies = [ + "anstyle", + "unicode-width", +] [[package]] name = "anstyle" -version = "1.0.2" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "autocfg" @@ -68,15 +72,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chic" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5b5db619f3556839cb2223ae86ff3f9a09da2c5013be42bc9af08c9589bf70c" -dependencies = [ - "annotate-snippets", -] - [[package]] name = "ciborium" version = "0.2.0" @@ -345,7 +340,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" name = "libcst" version = "1.6.0" dependencies = [ - "chic", + "annotate-snippets", "criterion", "difference", "itertools 0.13.0", @@ -821,6 +816,12 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unindent" version = "0.2.3" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index b8abcd1f..d4e71f62 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -39,7 +39,7 @@ paste = "1.0.15" pyo3 = { version = "0.22", optional = true } thiserror = "1.0.63" peg = "0.8.4" -chic = "1.2.2" +annotate-snippets = "0.11.5" regex = "1.10.6" memchr = "2.7.4" libcst_derive = { path = "../libcst_derive", version = "1.6.0" } diff --git a/native/libcst/src/lib.rs 
b/native/libcst/src/lib.rs index 03a97173..93e6bd05 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -91,32 +91,37 @@ fn bol_offset(source: &str, n: i32) -> usize { pub fn prettify_error(err: ParserError, label: &str) -> std::string::String { match err { ParserError::ParserError(e, module_text) => { + use annotate_snippets::{Level, Renderer, Snippet}; + let loc = e.location; let context = 1; + let line_start = max( + 1, + loc.start_pos + .line + .checked_sub(context as usize) + .unwrap_or(1), + ); let start_offset = bol_offset(module_text, loc.start_pos.line as i32 - context); let end_offset = bol_offset(module_text, loc.end_pos.line as i32 + context + 1); let source = &module_text[start_offset..end_offset]; let start = loc.start_pos.offset - start_offset; let end = loc.end_pos.offset - start_offset; - chic::Error::new(label) - .error( - max( - 1, - loc.start_pos - .line - .checked_sub(context as usize) - .unwrap_or(1), - ), - start, - if start == end { - min(end + 1, end_offset - start_offset + 1) - } else { - end - }, - source, - format!( - "expected {} {} -> {}", - e.expected, loc.start_pos, loc.end_pos + let end = if start == end { + min(end + 1, end_offset - start_offset + 1) + } else { + end + }; + Renderer::styled() + .render( + Level::Error.title(label).snippet( + Snippet::source(source) + .line_start(line_start) + .fold(false) + .annotations(vec![Level::Error.span(start..end).label(&format!( + "expected {} {} -> {}", + e.expected, loc.start_pos, loc.end_pos + ))]), ), ) .to_string() From edd75bfa62ca458a60336783404f2b9dc520b6ab Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Mar 2025 11:23:36 +0000 Subject: [PATCH 548/632] Bump pypa/cibuildwheel from 2.22.0 to 2.23.0 (#1299) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.22.0 to 2.23.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.22.0...v2.23.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
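A note alongside the `prettify_error` rewrite in [PATCH 547/632] above: that helper formats native parse failures for display, and on the Python side the same failures surface as `libcst.ParserSyntaxError` (the exception type constructed in `parser/errors.rs`, touched again in PATCH 552 below). A rough sketch of triggering that path, as assumed usage rather than anything shown in the patch:

```python
# Rough sketch (assumption, not from the patch): provoke a native parse failure
# and observe it as libcst.ParserSyntaxError on the Python side.
import libcst

try:
    libcst.parse_module("def f(:\n    pass\n")
except libcst.ParserSyntaxError as exc:
    print(exc)  # prints the error message with the offending location
```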
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a7c34bba..5d31667c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,7 +42,7 @@ jobs: with: platforms: all - name: Build wheels - uses: pypa/cibuildwheel@v2.22.0 + uses: pypa/cibuildwheel@v2.23.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From eae77997be16bc190c539f5ce3e331be08370b61 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 7 Mar 2025 14:18:25 +0000 Subject: [PATCH 549/632] ci: install libatomic on linux before rustup (#1301) --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5d31667c..596c6e84 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -13,7 +13,7 @@ jobs: os: [macos-latest, ubuntu-latest, windows-latest] env: SCCACHE_VERSION: 0.2.13 - CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" + CIBW_BEFORE_ALL_LINUX: "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" CIBW_BEFORE_BUILD_LINUX: "rm -rf native/target; ln -s /host/${{github.workspace}}/native/target native/target; [ -d /host/${{github.workspace}}/native/target ] || mkdir /host/${{github.workspace}}/native/target" CIBW_ENVIRONMENT_LINUX: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME CARGO_HOME=/host/home/runner/.cargo' CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" From 64ca5ed8dff0e25cded9e085f861a4ece05be4a6 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 7 Mar 2025 14:21:41 +0000 Subject: [PATCH 550/632] ci: move cibuildwheel config into pyproject.toml (#1277) --- .github/workflows/build.yml | 10 +--------- pyproject.toml | 29 +++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 596c6e84..6e6f2cd2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -13,15 +13,7 @@ jobs: os: [macos-latest, ubuntu-latest, windows-latest] env: SCCACHE_VERSION: 0.2.13 - CIBW_BEFORE_ALL_LINUX: "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" - CIBW_BEFORE_BUILD_LINUX: "rm -rf native/target; ln -s /host/${{github.workspace}}/native/target native/target; [ -d /host/${{github.workspace}}/native/target ] || mkdir /host/${{github.workspace}}/native/target" - CIBW_ENVIRONMENT_LINUX: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME CARGO_HOME=/host/home/runner/.cargo' - CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" - CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" - CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' - CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_i686 *-musllinux_ppc64le *-musllinux_s390x *-musllinux_armv7l" - CIBW_ARCHS_LINUX: auto aarch64 - CIBW_BUILD_VERBOSITY: 1 + GITHUB_WORKSPACE: "${{github.workspace}}" steps: - uses: 
actions/checkout@v4 with: diff --git a/pyproject.toml b/pyproject.toml index ea3953a8..e12a296b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,3 +80,32 @@ exclude-modules = '^libcst\.(testing|tests)' [tool.ufmt] excludes = ["native/", "stubs/"] + +[tool.cibuildwheel] +build-verbosity = 1 +environment = { PATH = "$PATH:$HOME/.cargo/bin" } +skip = [ + "pp*", + "*-win32", + "*-win_arm64", + "*-musllinux_i686", + "*-musllinux_ppc64le", + "*-musllinux_s390x", + "*-musllinux_armv7l", +] + +[tool.cibuildwheel.linux] +archs = ["auto", "aarch64"] +environment-pass = ["LIBCST_NO_LOCAL_SCHEME", "GITHUB_WORKSPACE"] +before-all = "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" +before-build = [ + "rm -rf native/target", + "ln -s /host/${GITHUB_WORKSPACE}/native/target native/target", + "[ -d /host/${GITHUB_WORKSPACE}/native/target ] || mkdir /host/${GITHUB_WORKSPACE}/native/target", +] + +[tool.cibuildwheel.macos] +before-all = "rustup target add aarch64-apple-darwin x86_64-apple-darwin" + +[tool.cibuildwheel.windows] +before-all = "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" From 5eccb5f08b7c476c42643d1a205fd0ef82b3d797 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 7 Mar 2025 15:32:39 +0000 Subject: [PATCH 551/632] ci: use native arm github runners (#1303) --- .github/workflows/build.yml | 7 +------ pyproject.toml | 7 ------- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6e6f2cd2..812d1577 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,7 +10,7 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-latest, ubuntu-latest, windows-latest] + os: [macos-latest, ubuntu-latest, ubuntu-24.04-arm, windows-latest] env: SCCACHE_VERSION: 0.2.13 GITHUB_WORKSPACE: "${{github.workspace}}" @@ -28,11 +28,6 @@ jobs: if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - - name: Set up QEMU - if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v3 - with: - platforms: all - name: Build wheels uses: pypa/cibuildwheel@v2.23.0 - uses: actions/upload-artifact@v4 diff --git a/pyproject.toml b/pyproject.toml index e12a296b..19b9da1a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -95,14 +95,7 @@ skip = [ ] [tool.cibuildwheel.linux] -archs = ["auto", "aarch64"] -environment-pass = ["LIBCST_NO_LOCAL_SCHEME", "GITHUB_WORKSPACE"] before-all = "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" -before-build = [ - "rm -rf native/target", - "ln -s /host/${GITHUB_WORKSPACE}/native/target native/target", - "[ -d /host/${GITHUB_WORKSPACE}/native/target ] || mkdir /host/${GITHUB_WORKSPACE}/native/target", -] [tool.cibuildwheel.macos] before-all = "rustup target add aarch64-apple-darwin x86_64-apple-darwin" From 727e433539802d14a6f3320be0453213c8afb8a1 Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Fri, 7 Mar 2025 08:35:17 -0700 Subject: [PATCH 552/632] Update for Pyo3 0.23 (#1289) * Update Cargo.lock and Cargo.toml for PyO3 0.23 support * Replace deprecated _bound methods with their new undeprecated names * Update TryIntoPy trait to use IntoPyObject * Update ParserError wrapper to use IntoPyObject * replace unwrap with early return --- native/Cargo.lock | 20 +++++----- native/libcst/Cargo.toml | 2 +- 
native/libcst/src/nodes/expression.rs | 8 ++-- native/libcst/src/nodes/traits.rs | 23 ++---------- native/libcst/src/parser/errors.rs | 54 ++++++++------------------- 5 files changed, 35 insertions(+), 72 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index c241e857..1c6cbf60 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -501,9 +501,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.22.2" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831e8e819a138c36e212f3af3fd9eeffed6bf1510a805af35b0edee5ffa59433" +checksum = "57fe09249128b3173d092de9523eaa75136bf7ba85e0d69eca241c7939c933cc" dependencies = [ "cfg-if", "indoc", @@ -519,9 +519,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.22.2" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e8730e591b14492a8945cdff32f089250b05f5accecf74aeddf9e8272ce1fa8" +checksum = "1cd3927b5a78757a0d71aa9dff669f903b1eb64b54142a9bd9f757f8fde65fd7" dependencies = [ "once_cell", "target-lexicon", @@ -529,9 +529,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.22.2" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e97e919d2df92eb88ca80a037969f44e5e70356559654962cbb3316d00300c6" +checksum = "dab6bb2102bd8f991e7749f130a70d05dd557613e39ed2deeee8e9ca0c4d548d" dependencies = [ "libc", "pyo3-build-config", @@ -539,9 +539,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.22.2" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb57983022ad41f9e683a599f2fd13c3664d7063a3ac5714cae4b7bee7d3f206" +checksum = "91871864b353fd5ffcb3f91f2f703a22a9797c91b9ab497b1acac7b07ae509c7" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -551,9 +551,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.22.2" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec480c0c51ddec81019531705acac51bcdbeae563557c982aa8263bb96880372" +checksum = "43abc3b80bc20f3facd86cd3c60beed58c3e2aa26213f3cda368de39c60a27e4" dependencies = [ "heck", "proc-macro2", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index d4e71f62..ea56e358 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" -pyo3 = { version = "0.22", optional = true } +pyo3 = { version = "0.23", optional = true } thiserror = "1.0.63" peg = "0.8.4" annotate-snippets = "0.11.5" diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index bc3699bf..cbd1e361 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -2536,7 +2536,7 @@ mod py { match self { Self::Starred(s) => s.try_into_py(py), Self::Simple { value, comma } => { - let libcst = PyModule::import_bound(py, "libcst")?; + let libcst = PyModule::import(py, "libcst")?; let kwargs = [ Some(("value", value.try_into_py(py)?)), comma @@ -2548,7 +2548,7 @@ mod py { .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict_bound(py); + .into_py_dict(py)?; Ok(libcst .getattr("Element") .expect("no Element found in libcst") @@ -2572,7 +2572,7 @@ mod py { whitespace_before_colon, .. 
} => { - let libcst = PyModule::import_bound(py, "libcst")?; + let libcst = PyModule::import(py, "libcst")?; let kwargs = [ Some(("key", key.try_into_py(py)?)), Some(("value", value.try_into_py(py)?)), @@ -2593,7 +2593,7 @@ mod py { .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict_bound(py); + .into_py_dict(py)?; Ok(libcst .getattr("DictElement") .expect("no Element found in libcst") diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index 397f6404..df61538b 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -118,7 +118,7 @@ impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { } #[cfg(feature = "py")] pub mod py { - use pyo3::{types::PyAny, types::PyTuple, IntoPy, PyObject, PyResult, Python}; + use pyo3::{types::PyTuple, IntoPyObjectExt, PyObject, PyResult, Python}; // TODO: replace with upstream implementation once // https://github.com/PyO3/pyo3/issues/1813 is resolved @@ -135,7 +135,7 @@ pub mod py { impl TryIntoPy for bool { fn try_into_py(self, py: Python) -> PyResult { - Ok(self.into_py(py)) + self.into_py_any(py) } } @@ -170,28 +170,13 @@ pub mod py { .map(|x| x.try_into_py(py)) .collect::>>()? .into_iter(); - Ok(PyTuple::new_bound(py, converted).into()) - } - } - - impl TryIntoPy for PyTuple { - fn try_into_py(self, py: Python) -> PyResult { - Ok(self.into_py(py)) + PyTuple::new(py, converted)?.into_py_any(py) } } impl<'a> TryIntoPy for &'a str { fn try_into_py(self, py: Python) -> PyResult { - Ok(self.into_py(py)) - } - } - - impl TryIntoPy for &'_ T - where - T: AsRef, - { - fn try_into_py(self, py: Python) -> PyResult { - Ok(self.into_py(py)) + self.into_py_any(py) } } } diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 85690c97..8237cd0b 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -28,18 +28,11 @@ pub enum ParserError<'a> { #[cfg(feature = "py")] mod py_error { - use pyo3::types::{IntoPyDict, PyAnyMethods, PyModule}; - use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; + use pyo3::types::{IntoPyDict, PyAny, PyAnyMethods, PyModule}; + use pyo3::{Bound, IntoPyObject, PyErr, PyResult, Python}; use super::ParserError; - struct Details { - message: String, - lines: Vec, - raw_line: u32, - raw_column: u32, - } - impl<'a> From> for PyErr { fn from(e: ParserError) -> Self { Python::with_gil(|py| { @@ -59,36 +52,21 @@ mod py_error { line = lines.len() - 1; col = 0; } - let kwargs = [ - ("message", e.to_string().into_py(py)), - ("lines", lines.into_py(py)), - ("raw_line", (line + 1).into_py(py)), - ("raw_column", col.into_py(py)), - ] - .into_py_dict_bound(py); - let libcst = - PyModule::import_bound(py, "libcst").expect("libcst cannot be imported"); - PyErr::from_value_bound( - libcst - .getattr("ParserSyntaxError") - .expect("ParserSyntaxError not found") - .call((), Some(&kwargs)) - .expect("failed to instantiate"), - ) + match || -> PyResult> { + let kwargs = [ + ("message", e.to_string().into_pyobject(py)?.into_any()), + ("lines", lines.into_pyobject(py)?.into_any()), + ("raw_line", (line + 1).into_pyobject(py)?.into_any()), + ("raw_column", col.into_pyobject(py)?.into_any()), + ] + .into_py_dict(py)?; + let libcst = PyModule::import(py, "libcst")?; + libcst.getattr("ParserSyntaxError")?.call((), Some(&kwargs)) + }() { + Ok(py_err_value) => PyErr::from_value(py_err_value), + Err(e) => e, + } }) } } - - impl<'a> PyErrArguments for Details { - fn arguments(self, py: pyo3::Python) -> pyo3::PyObject 
{ - [ - ("message", self.message.into_py(py)), - ("lines", self.lines.into_py(py)), - ("raw_line", self.raw_line.into_py(py)), - ("raw_column", self.raw_column.into_py(py)), - ] - .into_py_dict_bound(py) - .into_py(py) - } - } } From e2e712d43f362ae3b47e11faa1bbb0260810444e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Mar 2025 15:36:26 +0000 Subject: [PATCH 553/632] Bump flake8 from 7.1.1 to 7.1.2 (#1292) Bumps [flake8](https://github.com/pycqa/flake8) from 7.1.1 to 7.1.2. - [Commits](https://github.com/pycqa/flake8/compare/7.1.1...7.1.2) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 19b9da1a..b64fb60a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dev = [ "coverage[toml]>=4.5.4", "build>=0.10.0", "fixit==2.1.0", - "flake8==7.1.1", + "flake8==7.1.2", "Sphinx>=5.1.1", "hypothesis>=4.36.0", "hypothesmith>=0.0.4", From 218e8e5d43429fbf5abed31e4018a26c0103e82b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 7 Mar 2025 16:29:53 +0000 Subject: [PATCH 554/632] ci: strip local scheme from uploaded wheels --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index b64fb60a..33143da3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,6 +84,7 @@ excludes = ["native/", "stubs/"] [tool.cibuildwheel] build-verbosity = 1 environment = { PATH = "$PATH:$HOME/.cargo/bin" } +environment-pass = ["LIBCST_NO_LOCAL_SCHEME"] skip = [ "pp*", "*-win32", From cd959d66c0383daec367d3371f58fb4181935f62 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 7 Mar 2025 17:23:58 +0000 Subject: [PATCH 555/632] ci: pass through LIBCST_NO_LOCAL_SCHEME try #2 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 33143da3..fb227ce9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,7 @@ skip = [ ] [tool.cibuildwheel.linux] +environment-pass = ["LIBCST_NO_LOCAL_SCHEME"] before-all = "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env -u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" [tool.cibuildwheel.macos] From 129d9876d29bc56319c848fe6ed21461c14a7634 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 7 Mar 2025 18:12:22 +0000 Subject: [PATCH 556/632] ci: force LIBCST_NO_LOCAL_SCHEME in cibuildwheel Summary: Test Plan: --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fb227ce9..637ae895 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,8 +83,7 @@ excludes = ["native/", "stubs/"] [tool.cibuildwheel] build-verbosity = 1 -environment = { PATH = "$PATH:$HOME/.cargo/bin" } -environment-pass = ["LIBCST_NO_LOCAL_SCHEME"] +environment = { PATH = "$PATH:$HOME/.cargo/bin", LIBCST_NO_LOCAL_SCHEME="1" } skip = [ "pp*", "*-win32", From d580469ea515fa6fc6ef38054f3f181ddfea0157 Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Wed, 12 Mar 2025 15:57:31 -0600 Subject: [PATCH 557/632] add free-threaded CI (#1312) --- .github/workflows/ci.yml | 51 ++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 7 ++++-- 2 files changed, 54 insertions(+), 4 deletions(-) diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ab290f54..96bb960f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,6 +44,51 @@ jobs: hatch run coverage combine .coverage.pure hatch run coverage report + # TODO: + # merge into regular CI once hatch has support for creating environments on + # the free-threaded build: https://github.com/pypa/hatch/issues/1931 + free-threaded-tests: + name: "test (${{ matrix.os }}, 3.13t)" + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + # TODO: update to tagged release when there is one + - uses: actions/setup-python@9e62be81b28222addecf85e47571213eb7680449 + with: + cache: pip + cache-dependency-path: "pyproject.toml" + python-version: '3.13t' + - name: Build LibCST + run: | + # Install build-system.requires dependencies + pip install setuptools setuptools-scm setuptools-rust wheel + # Jupyter is annoying to install on free-threaded Python + pip install -e .[dev-without-jupyter] --no-build-isolation + - name: Native Parser Tests + # TODO: remove when native modules declare free-threaded support + env: + PYTHON_GIL: '0' + run: | + python -m coverage run -m libcst.tests + - name: Pure Parser Tests + env: + COVERAGE_FILE: .coverage.pure + LIBCST_PARSER_TYPE: pure + run: | + python -m coverage run -m libcst.tests + - name: Coverage + run: | + python -m coverage combine .coverage.pure + python -m coverage report + + # Run linters lint: runs-on: ubuntu-latest @@ -110,6 +155,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] + python-version: ["3.10", "3.13t"] steps: - uses: actions/checkout@v4 with: @@ -117,9 +163,10 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - - uses: actions/setup-python@v5 + # TODO: update to tagged release when there is one + - uses: actions/setup-python@9e62be81b28222addecf85e47571213eb7680449 with: - python-version: "3.10" + python-version: ${{ matrix.python-version }} - name: test uses: actions-rs/cargo@v1 with: diff --git a/pyproject.toml b/pyproject.toml index 637ae895..83f208e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,11 @@ dependencies = ["pyyaml>=5.2"] [project.optional-dependencies] dev = [ + "libcst[dev-without-jupyter]", + "jupyter>=1.0.0", + "nbsphinx>=0.4.2", +] +dev-without-jupyter = [ "black==24.8.0", "coverage[toml]>=4.5.4", "build>=0.10.0", @@ -30,9 +35,7 @@ dev = [ "Sphinx>=5.1.1", "hypothesis>=4.36.0", "hypothesmith>=0.0.4", - "jupyter>=1.0.0", "maturin>=1.7.0,<1.8", - "nbsphinx>=0.4.2", "prompt-toolkit>=2.0.9", "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", From 17eafc3f437dd770ca47762f2005bbd4e99aa973 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 13 Mar 2025 08:39:55 +0100 Subject: [PATCH 558/632] Bump PyO3 to 0.23.5 (#1311) --- native/Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 1c6cbf60..7352b7f8 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -501,9 +501,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57fe09249128b3173d092de9523eaa75136bf7ba85e0d69eca241c7939c933cc" +checksum = 
"7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872" dependencies = [ "cfg-if", "indoc", @@ -519,9 +519,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd3927b5a78757a0d71aa9dff669f903b1eb64b54142a9bd9f757f8fde65fd7" +checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb" dependencies = [ "once_cell", "target-lexicon", @@ -529,9 +529,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dab6bb2102bd8f991e7749f130a70d05dd557613e39ed2deeee8e9ca0c4d548d" +checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d" dependencies = [ "libc", "pyo3-build-config", @@ -539,9 +539,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91871864b353fd5ffcb3f91f2f703a22a9797c91b9ab497b1acac7b07ae509c7" +checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -551,9 +551,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43abc3b80bc20f3facd86cd3c60beed58c3e2aa26213f3cda368de39c60a27e4" +checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028" dependencies = [ "heck", "proc-macro2", From 5902ccede34e2a6ca5ff3d32ca7e05816029031b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 13 Mar 2025 09:56:58 +0000 Subject: [PATCH 559/632] Bump version to 1.7.0 (#1313) --- CHANGELOG.md | 19 ++++++++++++++++++- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 22 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c2a559b7..b0ade209 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,21 @@ -# 1.6.0 - 2024-01-09 +# 1.7.0 - 2025-03-13 + +## Added +* add free-threaded CI by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1312 + +## Updated +* Remove dependency on `chic` and upgrade `annotate-snippets` by @zanieb in https://github.com/Instagram/LibCST/pull/1293 +* Update for Pyo3 0.23 by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1289 +* Bump PyO3 to 0.23.5 by @mgorny in https://github.com/Instagram/LibCST/pull/1311 + +## New Contributors +* @zanieb made their first contribution in https://github.com/Instagram/LibCST/pull/1293 +* @ngoldbaum made their first contribution in https://github.com/Instagram/LibCST/pull/1289 +* @mgorny made their first contribution in https://github.com/Instagram/LibCST/pull/1311 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.6.0...v1.7.0 + +# 1.6.0 - 2025-01-09 ## Fixed diff --git a/native/Cargo.lock b/native/Cargo.lock index 7352b7f8..620ce4ea 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -338,7 +338,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.6.0" +version = "1.7.0" dependencies = [ "annotate-snippets", "criterion", @@ -356,7 +356,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.6.0" +version = "1.7.0" dependencies = [ "quote", "syn 2.0.75", diff --git a/native/libcst/Cargo.toml 
b/native/libcst/Cargo.toml index ea56e358..2d3c999b 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.6.0" +version = "1.7.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index b645d93b..8d5fd2ff 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.6.0" +version = "1.7.0" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From 79f736ac605b147a2b5cd14ab068a43a92f474a8 Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Thu, 13 Mar 2025 13:17:02 -0600 Subject: [PATCH 560/632] ci: don't use `--no-build-isolation` for free-threaded CI (#1314) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 96bb960f..88e824bb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,7 +70,7 @@ jobs: # Install build-system.requires dependencies pip install setuptools setuptools-scm setuptools-rust wheel # Jupyter is annoying to install on free-threaded Python - pip install -e .[dev-without-jupyter] --no-build-isolation + pip install -e .[dev-without-jupyter] - name: Native Parser Tests # TODO: remove when native modules declare free-threaded support env: From 2c7834eae6a821868c4165ea0f7f99a5bbf5fbc6 Mon Sep 17 00:00:00 2001 From: Hadi Alqattan Date: Sun, 23 Mar 2025 00:24:15 +0300 Subject: [PATCH 561/632] ci: enable macos intel wheels (#1316) --- .github/workflows/build.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 812d1577..b28d8d5f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,7 +10,8 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-latest, ubuntu-latest, ubuntu-24.04-arm, windows-latest] + # macos-13 is an intel runner, macos-latest is apple silicon + os: [macos-13, macos-latest, ubuntu-latest, ubuntu-24.04-arm, windows-latest] env: SCCACHE_VERSION: 0.2.13 GITHUB_WORKSPACE: "${{github.workspace}}" @@ -24,6 +25,10 @@ jobs: cache: pip cache-dependency-path: "pyproject.toml" python-version: "3.12" + - name: Set MACOSX_DEPLOYMENT_TARGET for Intel MacOS + if: matrix.os == 'macos-13' + run: >- + echo MACOSX_DEPLOYMENT_TARGET=10.12 >> $GITHUB_ENV - name: Disable scmtools local scheme if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- From cef85096b6bc9ff1889048b8993ba7555cb1c0e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 22 Mar 2025 21:24:55 +0000 Subject: [PATCH 562/632] build(deps): bump pypa/cibuildwheel from 2.23.0 to 2.23.1 (#1315) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.23.0 to 2.23.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/v2.23.1/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.23.0...v2.23.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
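Returning to the free-threaded CI job added in [PATCH 557/632] above (`python-version: '3.13t'`, `PYTHON_GIL: '0'`): when reproducing those jobs locally it helps to confirm the interpreter build and GIL state first. A small sketch, assuming CPython 3.13+ and not part of the patches:

```python
# Quick local check (assumption: CPython 3.13+, not part of the patch series)
# for reproducing the 3.13t jobs: is this a free-threaded build, and is the
# GIL actually disabled (the workflow sets PYTHON_GIL=0)?
import sys
import sysconfig

print(sysconfig.get_config_var("Py_GIL_DISABLED"))  # 1 on a 3.13t build
if hasattr(sys, "_is_gil_enabled"):
    print(sys._is_gil_enabled())  # False when PYTHON_GIL=0 is honored
```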
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b28d8d5f..982c95dd 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -34,7 +34,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.23.0 + uses: pypa/cibuildwheel@v2.23.1 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 6d31b5ead584c84bde8f48bbd5b9c288687d5a23 Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Mon, 31 Mar 2025 22:12:13 -0600 Subject: [PATCH 563/632] use released version of setup-python (#1318) --- .github/workflows/ci.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 88e824bb..67333e3c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,8 +59,7 @@ jobs: with: fetch-depth: 0 persist-credentials: false - # TODO: update to tagged release when there is one - - uses: actions/setup-python@9e62be81b28222addecf85e47571213eb7680449 + - uses: actions/setup-python@v5 with: cache: pip cache-dependency-path: "pyproject.toml" @@ -163,8 +162,7 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - # TODO: update to tagged release when there is one - - uses: actions/setup-python@9e62be81b28222addecf85e47571213eb7680449 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: test From a4804cf07ea9a865a16068700aa3916ae09ff20c Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Wed, 2 Apr 2025 03:19:27 -0600 Subject: [PATCH 564/632] allow configuring empty formatter lists in codemod CLI (#1319) * allow configuring empty formatter lists * appease linter --- libcst/tool.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/libcst/tool.py b/libcst/tool.py index ace15ff6..ee4f3673 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -377,7 +377,10 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 command_instance = command_class(CodemodContext(), **codemod_args) # Sepcify target version for black formatter - if os.path.basename(config["formatter"][0]) in ("black", "black.exe"): + if any(config["formatter"]) and os.path.basename(config["formatter"][0]) in ( + "black", + "black.exe", + ): parsed_version = parse_version_string(args.python_version) config["formatter"] = [ From 11d6e36450f9062b74bf82f314d42456aabf5625 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 10:19:43 +0100 Subject: [PATCH 565/632] build(deps): bump pypa/cibuildwheel from 2.23.1 to 2.23.2 (#1317) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.23.1 to 2.23.2. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.23.1...v2.23.2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
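On [PATCH 564/632] above ("allow configuring empty formatter lists in codemod CLI"): the commit message does not spell out the failure being fixed — with an empty `formatter` list in the codemod config, the old code indexed `config["formatter"][0]` and raised `IndexError`; the added `any(...)` guard skips the black-specific branch instead. A toy reproduction of that reading (an inference from the diff, not stated in the commit):

```python
# Toy reproduction (inference from the diff in PATCH 564, not stated in the
# commit message): why the `any(...)` guard is needed.
import os

config = {"formatter": []}  # e.g. an empty formatter list in the codemod config

# Before the patch this ran unconditionally and raised IndexError:
#     os.path.basename(config["formatter"][0])

# After the patch the black-specific branch is skipped entirely:
if any(config["formatter"]) and os.path.basename(config["formatter"][0]) in (
    "black",
    "black.exe",
):
    print("would pass --target-version to black")
```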
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 982c95dd..b694f83a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -34,7 +34,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.23.1 + uses: pypa/cibuildwheel@v2.23.2 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From b2406e799c312b6b080003786484e8bb1871f55b Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Mon, 19 May 2025 19:13:17 -0400 Subject: [PATCH 566/632] update pyo3 to 0.25 (#1324) * build(deps): bump pyo3 from 0.23.5 to 0.25.0 in /native Bumps [pyo3](https://github.com/pyo3/pyo3) from 0.23.5 to 0.25.0. - [Release notes](https://github.com/pyo3/pyo3/releases) - [Changelog](https://github.com/PyO3/pyo3/blob/main/CHANGELOG.md) - [Commits](https://github.com/pyo3/pyo3/compare/v0.23.5...v0.25.0) --- updated-dependencies: - dependency-name: pyo3 dependency-version: 0.25.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * update pyo3 to 0.24 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Amethyst Reese --- native/Cargo.lock | 25 ++++++++++++------------ native/libcst/Cargo.toml | 2 +- native/libcst/src/nodes/parser_config.rs | 3 ++- native/libcst_derive/src/into_py.rs | 8 ++++---- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 620ce4ea..c9e67cac 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -501,11 +501,10 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.23.5" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872" +checksum = "f239d656363bcee73afef85277f1b281e8ac6212a1d42aa90e55b90ed43c47a4" dependencies = [ - "cfg-if", "indoc", "libc", "memoffset 0.9.0", @@ -519,9 +518,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.23.5" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb" +checksum = "755ea671a1c34044fa165247aaf6f419ca39caa6003aee791a0df2713d8f1b6d" dependencies = [ "once_cell", "target-lexicon", @@ -529,9 +528,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.23.5" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d" +checksum = "fc95a2e67091e44791d4ea300ff744be5293f394f1bafd9f78c080814d35956e" dependencies = [ "libc", "pyo3-build-config", @@ -539,9 +538,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.23.5" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da" +checksum = "a179641d1b93920829a62f15e87c0ed791b6c8db2271ba0fd7c2686090510214" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -551,9 +550,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.23.5" +version = "0.25.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028" +checksum = "9dff85ebcaab8c441b0e3f7ae40a6963ecea8a9f5e74f647e33fcf5ec9a1e89e" dependencies = [ "heck", "proc-macro2", @@ -719,9 +718,9 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.12.15" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4873307b7c257eddcb50c9bedf158eb669578359fb28428bef438fec8e6ba7c2" +checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" [[package]] name = "termcolor" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 2d3c999b..ae7821f9 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" -pyo3 = { version = "0.23", optional = true } +pyo3 = { version = "0.25", optional = true } thiserror = "1.0.63" peg = "0.8.4" annotate-snippets = "0.11.5" diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs index 4a80a048..c10d86d5 100644 --- a/native/libcst/src/nodes/parser_config.rs +++ b/native/libcst/src/nodes/parser_config.rs @@ -125,7 +125,8 @@ fn parser_config_asdict<'py>(py: Python<'py>, config: PyRef<'py, ParserConfig>) ("version", config.version.clone_ref(py)), ("future_imports", config.future_imports.clone_ref(py)), ] - .into_py_dict_bound(py) + .into_py_dict(py) + .unwrap() } pub fn init_module(_py: Python, m: &PyModule) -> PyResult<()> { diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index ac656cb4..f5470aa1 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -40,7 +40,7 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { Self::#varname { #(#fieldnames,)* .. } => { use pyo3::types::PyAnyMethods; - let libcst = pyo3::types::PyModule::import_bound(py, "libcst")?; + let libcst = pyo3::types::PyModule::import(py, "libcst")?; let kwargs = #kwargs_toks ; Ok(libcst .getattr(stringify!(#varname)) @@ -90,7 +90,7 @@ fn impl_into_py_struct(ast: &DeriveInput, e: &DataStruct) -> TokenStream { impl#generics crate::nodes::traits::py::TryIntoPy for #ident #generics { fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { use pyo3::types::PyAnyMethods; - let libcst = pyo3::types::PyModule::import_bound(py, "libcst")?; + let libcst = pyo3::types::PyModule::import(py, "libcst")?; let kwargs = #kwargs_toks ; Ok(libcst .getattr(stringify!(#ident)) @@ -165,7 +165,7 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt #(#optional_rust_varnames.map(|x| x.try_into_py(py)).transpose()?.map(|x| (stringify!(#optional_py_varnames), x)),)* }; if empty_kwargs { - quote! { pyo3::types::PyDict::new_bound(py) } + quote! { pyo3::types::PyDict::new(py) } } else { quote! { [ #kwargs_pairs #optional_pairs ] @@ -173,7 +173,7 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict_bound(py) + .into_py_dict(py)? } } } From 26139e72decb0d49725a2894834641c0507d3f7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 19:14:19 -0400 Subject: [PATCH 567/632] build(deps): bump jinja2 from 3.1.5 to 3.1.6 (#1310) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.5 to 3.1.6. 
- [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.5...3.1.6) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 83f208e4..238bf46d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ dev-without-jupyter = [ "usort==1.0.8.post1", "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", - "jinja2==3.1.5", + "jinja2==3.1.6", ] [project.urls] From 64c761d48612912efe7136d0b98b064e371bb6e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 19:14:36 -0400 Subject: [PATCH 568/632] build(deps): bump flake8 from 7.1.2 to 7.2.0 (#1321) Bumps [flake8](https://github.com/pycqa/flake8) from 7.1.2 to 7.2.0. - [Commits](https://github.com/pycqa/flake8/compare/7.1.2...7.2.0) --- updated-dependencies: - dependency-name: flake8 dependency-version: 7.2.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 238bf46d..1d30b075 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dev-without-jupyter = [ "coverage[toml]>=4.5.4", "build>=0.10.0", "fixit==2.1.0", - "flake8==7.1.2", + "flake8==7.2.0", "Sphinx>=5.1.1", "hypothesis>=4.36.0", "hypothesmith>=0.0.4", From 6e70e1cadc5b8b4066a48bd6be0f72fca32ea4fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 23:51:23 +0000 Subject: [PATCH 569/632] build(deps): bump trybuild from 1.0.99 to 1.0.105 in /native (#1329) --- native/Cargo.lock | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index c9e67cac..f0e2e809 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -722,6 +722,12 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" +[[package]] +name = "target-triple" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" + [[package]] name = "termcolor" version = "1.1.3" @@ -797,14 +803,15 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.99" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "207aa50d36c4be8d8c6ea829478be44a372c6a77669937bb39c698e52f1491e8" +checksum = "1c9bf9513a2f4aeef5fdac8677d7d349c79fdbcc03b9c86da6e9d254f1e43be2" dependencies = [ "glob", "serde", "serde_derive", "serde_json", + "target-triple", "termcolor", "toml", ] From d3386b168f362e63b5b5665e0764a0021a927c90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 19:53:09 -0400 Subject: [PATCH 570/632] build(deps): bump astral-sh/setup-uv from 5 to 6 (#1327) Bumps 
[astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 5 to 6. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v5...v6) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/zizmor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index 797b8d9b..26adcaf2 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -21,7 +21,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 - name: Run zizmor 🌈 run: uvx zizmor --format sarif . > results.sarif From be0b668d089f1e189ebd122f0f7beca31c8d8867 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 20:53:44 -0400 Subject: [PATCH 571/632] Bump black from 24.8.0 to 25.1.0 (#1290) * Bump black from 24.8.0 to 25.1.0 Bumps [black](https://github.com/psf/black) from 24.8.0 to 25.1.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/24.8.0...25.1.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Fix formatting and tests --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Amethyst Reese --- libcst/_parser/parso/tests/test_utils.py | 4 ++-- libcst/_parser/parso/utils.py | 6 +++--- libcst/codemod/commands/rename.py | 2 +- libcst/codemod/tests/test_codemod_cli.py | 2 +- libcst/matchers/_matcher_base.py | 6 +++--- libcst/metadata/scope_provider.py | 2 +- pyproject.toml | 2 +- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/libcst/_parser/parso/tests/test_utils.py b/libcst/_parser/parso/tests/test_utils.py index e033fe9d..1f548ef4 100644 --- a/libcst/_parser/parso/tests/test_utils.py +++ b/libcst/_parser/parso/tests/test_utils.py @@ -39,8 +39,8 @@ class ParsoUtilsTest(UnitTest): # Invalid line breaks ("a\vb", ["a\vb"], False), ("a\vb", ["a\vb"], True), - ("\x1C", ["\x1C"], False), - ("\x1C", ["\x1C"], True), + ("\x1c", ["\x1c"], False), + ("\x1c", ["\x1c"], True), ) ) def test_split_lines(self, string, expected_result, keepends): diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py index 7814876c..54517123 100644 --- a/libcst/_parser/parso/utils.py +++ b/libcst/_parser/parso/utils.py @@ -29,9 +29,9 @@ from typing import Optional, Sequence, Tuple, Union _NON_LINE_BREAKS = ( "\v", # Vertical Tabulation 0xB "\f", # Form Feed 0xC - "\x1C", # File Separator - "\x1D", # Group Separator - "\x1E", # Record Separator + "\x1c", # File Separator + "\x1d", # Group Separator + "\x1e", # Record Separator "\x85", # Next Line (NEL - Equivalent to CR+LF. # Used to mark end-of-line on some IBM mainframes.) 
"\u2028", # Line Separator diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index 361e33e6..f1fb4693 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -15,7 +15,7 @@ from libcst.metadata import QualifiedNameProvider def leave_import_decorator( - method: Callable[..., Union[cst.Import, cst.ImportFrom]] + method: Callable[..., Union[cst.Import, cst.ImportFrom]], ) -> Callable[..., Union[cst.Import, cst.ImportFrom]]: # We want to record any 'as name' that is relevant but only after we leave the corresponding Import/ImportFrom node since # we don't want the 'as name' to interfere with children 'Name' and 'Attribute' nodes. diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 8773cd77..18dab870 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -44,7 +44,7 @@ class TestCodemodCLI(UnitTest): ) else: self.assertIn( - "error: cannot format -: Cannot parse: 13:10: async with AsyncExitStack() as stack:", + "error: cannot format -: Cannot parse for target version Python 3.6: 13:10: async with AsyncExitStack() as stack:", rlt.stderr.decode("utf-8"), ) diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 039694a5..2da2ff4c 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -800,7 +800,7 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): def ZeroOrMore( - matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT + matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT, ) -> AtLeastN[Union[_MatcherT, DoNotCareSentinel]]: """ Used as a convenience wrapper to :class:`AtLeastN` when ``n`` is equal to ``0``. @@ -903,7 +903,7 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): def ZeroOrOne( - matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT + matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT, ) -> AtMostN[Union[_MatcherT, DoNotCareSentinel]]: """ Used as a convenience wrapper to :class:`AtMostN` when ``n`` is equal to ``1``. 
@@ -1017,7 +1017,7 @@ def _matches_zero_nodes( MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, DoNotCareSentinel, - ] + ], ) -> bool: if isinstance(matcher, AtLeastN) and matcher.n == 0: return True diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index ac9c4b9d..909a55b2 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -778,7 +778,7 @@ class AnnotationScope(LocalScope): # Attribute(value=Name(value="a"), attr=Name(value="b")) -> ("a.b", "a") # each string has the corresponding CSTNode attached to it def _gen_dotted_names( - node: Union[cst.Attribute, cst.Name] + node: Union[cst.Attribute, cst.Name], ) -> Iterator[Tuple[str, Union[cst.Attribute, cst.Name]]]: if isinstance(node, cst.Name): yield node.value, node diff --git a/pyproject.toml b/pyproject.toml index 1d30b075..6bd6bad1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dev = [ "nbsphinx>=0.4.2", ] dev-without-jupyter = [ - "black==24.8.0", + "black==25.1.0", "coverage[toml]>=4.5.4", "build>=0.10.0", "fixit==2.1.0", From 9046fba2319dba40e7784eb4a18207b41287695a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 17:32:24 -0700 Subject: [PATCH 572/632] Bump regex from 1.10.6 to 1.11.1 in /native (#1233) Bumps [regex](https://github.com/rust-lang/regex) from 1.10.6 to 1.11.1. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.10.6...1.11.1) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ native/libcst/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index f0e2e809..ba2f86eb 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -592,9 +592,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.6" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -604,9 +604,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -615,9 +615,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustix" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index ae7821f9..8b90d85f 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -40,7 +40,7 @@ pyo3 = { version = "0.25", optional = true } thiserror = "1.0.63" peg = "0.8.4" annotate-snippets = "0.11.5" -regex = "1.10.6" +regex = 
"1.11.1" memchr = "2.7.4" libcst_derive = { path = "../libcst_derive", version = "1.6.0" } From b8fa7577497011ecb37fdd8904058d8312dcd876 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 17:32:31 -0700 Subject: [PATCH 573/632] Bump syn from 2.0.75 to 2.0.87 in /native (#1238) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.75 to 2.0.87. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.75...2.0.87) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index ba2f86eb..fb8ebae6 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -359,7 +359,7 @@ name = "libcst_derive" version = "1.7.0" dependencies = [ "quote", - "syn 2.0.75", + "syn 2.0.87", "trybuild", ] @@ -545,7 +545,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.75", + "syn 2.0.87", ] [[package]] @@ -558,7 +558,7 @@ dependencies = [ "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.75", + "syn 2.0.87", ] [[package]] @@ -670,7 +670,7 @@ checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.87", ] [[package]] @@ -707,9 +707,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.75" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", @@ -754,7 +754,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.87", ] [[package]] From 91a5d7efed875ae948da73f4e766f63d95801960 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 17:38:12 -0700 Subject: [PATCH 574/632] build(deps): bump pypa/cibuildwheel from 2.23.2 to 2.23.3 (#1328) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.23.2 to 2.23.3. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.23.2...v2.23.3) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-version: 2.23.3 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b694f83a..65e8683d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -34,7 +34,7 @@ jobs: run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.23.2 + uses: pypa/cibuildwheel@v2.23.3 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 6cfabc9a80cabf27de5700e24f60bc557ed8d59d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 17:49:34 -0700 Subject: [PATCH 575/632] build(deps): bump thiserror from 1.0.63 to 2.0.12 in /native (#1308) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.63 to 2.0.12. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.63...2.0.12) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index fb8ebae6..bf9b91db 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -739,18 +739,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 8b90d85f..4182a6d1 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -37,7 +37,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" pyo3 = { version = "0.25", optional = true } -thiserror = "1.0.63" +thiserror = "2.0.12" peg = "0.8.4" annotate-snippets = "0.11.5" regex = "1.11.1" From 88457646b854046fac14fdedbf6a591141a3c4bf Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 21 May 2025 21:01:18 +0100 Subject: [PATCH 576/632] ci: build windows arm64 wheels (#1304) --- .github/workflows/build.yml | 11 ++++++++++- pyproject.toml | 3 +-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 65e8683d..429e4b82 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,15 @@ jobs: fail-fast: false matrix: # macos-13 is an intel runner, macos-latest is apple silicon - os: [macos-13, macos-latest, ubuntu-latest, ubuntu-24.04-arm, windows-latest] + os: + [ + macos-13, + macos-latest, + ubuntu-latest, + ubuntu-24.04-arm, + windows-latest, + windows-11-arm, + ] env: SCCACHE_VERSION: 0.2.13 GITHUB_WORKSPACE: "${{github.workspace}}" @@ 
-25,6 +33,7 @@ jobs: cache: pip cache-dependency-path: "pyproject.toml" python-version: "3.12" + - uses: dtolnay/rust-toolchain@stable # zizmor: ignore[unpinned-uses] - name: Set MACOSX_DEPLOYMENT_TARGET for Intel MacOS if: matrix.os == 'macos-13' run: >- diff --git a/pyproject.toml b/pyproject.toml index 6bd6bad1..6bf33d40 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,7 +90,6 @@ environment = { PATH = "$PATH:$HOME/.cargo/bin", LIBCST_NO_LOCAL_SCHEME="1" } skip = [ "pp*", "*-win32", - "*-win_arm64", "*-musllinux_i686", "*-musllinux_ppc64le", "*-musllinux_s390x", @@ -105,4 +104,4 @@ before-all = "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env -u before-all = "rustup target add aarch64-apple-darwin x86_64-apple-darwin" [tool.cibuildwheel.windows] -before-all = "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" +before-all = "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc aarch64-pc-windows-msvc" From d002c14d6b7a7121713bd7f30a73a84f4b2b4c1c Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 22 May 2025 08:18:20 +0100 Subject: [PATCH 577/632] Replace multiprocessing with ProcessPoolExecutor (#1294) Instead of relying on `multiprocessing.Pool`, this PR replaces the implementation of `parallel_exec_transform_with_prettyprint` with `concurrent.futures.ProcessPoolExecutor` --- libcst/codemod/_cli.py | 20 ++++++++------- libcst/codemod/_dummy_pool.py | 47 ++++++++++++++++++++++------------- 2 files changed, 41 insertions(+), 26 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 2481bf9d..d091ad8b 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -14,16 +14,17 @@ import subprocess import sys import time import traceback +from concurrent.futures import as_completed, Executor, ProcessPoolExecutor from copy import deepcopy from dataclasses import dataclass, replace -from multiprocessing import cpu_count, Pool +from multiprocessing import cpu_count from pathlib import Path from typing import Any, AnyStr, cast, Dict, List, Optional, Sequence, Union from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod from libcst.codemod._context import CodemodContext -from libcst.codemod._dummy_pool import DummyPool +from libcst.codemod._dummy_pool import DummyExecutor from libcst.codemod._runner import ( SkipFile, SkipReason, @@ -607,13 +608,14 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 python_version=python_version, ) + pool_impl: type[Executor] if total == 1 or jobs == 1: # Simple case, we should not pay for process overhead. - # Let's just use a dummy synchronous pool. + # Let's just use a dummy synchronous executor. jobs = 1 - pool_impl = DummyPool + pool_impl = DummyExecutor else: - pool_impl = Pool + pool_impl = ProcessPoolExecutor # Warm the parser, pre-fork. 
parse_module( "", @@ -629,7 +631,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 warnings: int = 0 skips: int = 0 - with pool_impl(processes=jobs) as p: # type: ignore + with pool_impl(max_workers=jobs) as executor: # type: ignore args = [ { "transformer": transform, @@ -640,9 +642,9 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 for filename in files ] try: - for result in p.imap_unordered( - _execute_transform_wrap, args, chunksize=chunksize - ): + futures = [executor.submit(_execute_transform_wrap, arg) for arg in args] + for future in as_completed(futures): + result = future.result() # Print an execution result, keep track of failures _print_parallel_result( result, diff --git a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py index c4a24932..34c911bd 100644 --- a/libcst/codemod/_dummy_pool.py +++ b/libcst/codemod/_dummy_pool.py @@ -3,37 +3,50 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import sys +from concurrent.futures import Executor, Future from types import TracebackType -from typing import Callable, Generator, Iterable, Optional, Type, TypeVar +from typing import Callable, Optional, Type, TypeVar -RetT = TypeVar("RetT") -ArgT = TypeVar("ArgT") +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +Return = TypeVar("Return") +Params = ParamSpec("Params") -class DummyPool: +class DummyExecutor(Executor): """ - Synchronous dummy `multiprocessing.Pool` analogue. + Synchronous dummy `concurrent.futures.Executor` analogue. """ - def __init__(self, processes: Optional[int] = None) -> None: + def __init__(self, max_workers: Optional[int] = None) -> None: pass - def imap_unordered( + def submit( self, - func: Callable[[ArgT], RetT], - iterable: Iterable[ArgT], - chunksize: Optional[int] = None, - ) -> Generator[RetT, None, None]: - for args in iterable: - yield func(args) + fn: Callable[Params, Return], + /, + *args: Params.args, + **kwargs: Params.kwargs, + ) -> Future[Return]: + future: Future[Return] = Future() + try: + result = fn(*args, **kwargs) + future.set_result(result) + except Exception as exc: + future.set_exception(exc) + return future - def __enter__(self) -> "DummyPool": + def __enter__(self) -> "DummyExecutor": return self def __exit__( self, - exc_type: Optional[Type[Exception]], - exc: Optional[Exception], - tb: Optional[TracebackType], + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], ) -> None: pass From 52acdf416313012501d31dc004586d5dc0011359 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 25 May 2025 09:23:10 +0100 Subject: [PATCH 578/632] cli: Instantiate Codemods per file (#1334) Instead of sharing instances of a Codemod across many files, this PR allows passing in a Codemod class to `parallel_exec_transform_with_prettyprint` which will then instantiate the Codemod for each file. `tool._codemod_impl` now starts using this API. The old behavior is deprecated, because sharing codemod instances across files is a surprising behavior, and causes hard-to-diagnose bugs when a Codemod keeps track of its state via instance variables. 
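A minimal usage sketch of the new calling convention: pass the Codemod class together with optional `codemod_args`, and a fresh instance is created for each file. The `NoopCommand` class and the `"."` target directory below are illustrative stand-ins, not part of this change; the entry points shown (`gather_files`, `parallel_exec_transform_with_prettyprint`) are the ones exercised in the tests further down.

```python
# Illustrative only: a trivial codemod class handed to the parallel runner.
import libcst as cst
from libcst.codemod import (
    VisitorBasedCodemodCommand,
    gather_files,
    parallel_exec_transform_with_prettyprint,
)


class NoopCommand(VisitorBasedCodemodCommand):
    DESCRIPTION = "Stand-in codemod that leaves every module unchanged."

    def leave_Module(
        self, original_node: cst.Module, updated_node: cst.Module
    ) -> cst.Module:
        return updated_node


files = gather_files(".")
result = parallel_exec_transform_with_prettyprint(
    NoopCommand,      # the class, not an instance: one instance is built per file
    files,
    codemod_args={},  # constructor kwargs forwarded to each per-file instance
    format_code=False,
    hide_progress=True,
)
print(f"{result.successes} succeeded, {result.failures} failed, {result.skips} skipped")
```
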
--- libcst/codemod/_cli.py | 279 +++++++++++++++++++++------------------ libcst/tests/test_e2e.py | 75 ++++++----- libcst/tool.py | 7 +- 3 files changed, 192 insertions(+), 169 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index d091ad8b..71143d69 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -16,10 +16,11 @@ import time import traceback from concurrent.futures import as_completed, Executor, ProcessPoolExecutor from copy import deepcopy -from dataclasses import dataclass, replace +from dataclasses import dataclass from multiprocessing import cpu_count from pathlib import Path -from typing import Any, AnyStr, cast, Dict, List, Optional, Sequence, Union +from typing import AnyStr, cast, Dict, List, Optional, Sequence, Type, Union +from warnings import warn from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod @@ -213,12 +214,52 @@ class ExecutionConfig: unified_diff: Optional[int] = None -def _execute_transform( # noqa: C901 - transformer: Codemod, +def _prepare_context( + repo_root: str, filename: str, - config: ExecutionConfig, scratch: Dict[str, object], -) -> ExecutionResult: + repo_manager: Optional[FullRepoManager], +) -> CodemodContext: + # determine the module and package name for this file + try: + module_name_and_package = calculate_module_and_package(repo_root, filename) + mod_name = module_name_and_package.name + pkg_name = module_name_and_package.package + except ValueError as ex: + print(f"Failed to determine module name for {filename}: {ex}", file=sys.stderr) + mod_name = None + pkg_name = None + return CodemodContext( + scratch=scratch, + filename=filename, + full_module_name=mod_name, + full_package_name=pkg_name, + metadata_manager=repo_manager, + ) + + +def _instantiate_transformer( + transformer: Union[Codemod, Type[Codemod]], + repo_root: str, + filename: str, + original_scratch: Dict[str, object], + codemod_kwargs: Dict[str, object], + repo_manager: Optional[FullRepoManager], +) -> Codemod: + if isinstance(transformer, type): + return transformer( # type: ignore + context=_prepare_context(repo_root, filename, {}, repo_manager), + **codemod_kwargs, + ) + transformer.context = _prepare_context( + repo_root, filename, deepcopy(original_scratch), repo_manager + ) + return transformer + + +def _check_for_skip( + filename: str, config: ExecutionConfig +) -> Union[ExecutionResult, bytes]: for pattern in config.blacklist_patterns: if re.fullmatch(pattern, filename): return ExecutionResult( @@ -230,45 +271,46 @@ def _execute_transform( # noqa: C901 ), ) - try: - with open(filename, "rb") as fp: - oldcode = fp.read() + with open(filename, "rb") as fp: + oldcode = fp.read() - # Skip generated files - if ( - not config.include_generated - and config.generated_code_marker.encode("utf-8") in oldcode - ): - return ExecutionResult( - filename=filename, - changed=False, - transform_result=TransformSkip( - skip_reason=SkipReason.GENERATED, - skip_description="Generated file.", - ), - ) - - # determine the module and package name for this file - try: - module_name_and_package = calculate_module_and_package( - config.repo_root or ".", filename - ) - mod_name = module_name_and_package.name - pkg_name = module_name_and_package.package - except ValueError as ex: - print( - f"Failed to determine module name for {filename}: {ex}", file=sys.stderr - ) - mod_name = None - pkg_name = None - - # Apart from metadata_manager, every field of context should be reset per file - transformer.context = 
CodemodContext( - scratch=deepcopy(scratch), + # Skip generated files + if ( + not config.include_generated + and config.generated_code_marker.encode("utf-8") in oldcode + ): + return ExecutionResult( filename=filename, - full_module_name=mod_name, - full_package_name=pkg_name, - metadata_manager=transformer.context.metadata_manager, + changed=False, + transform_result=TransformSkip( + skip_reason=SkipReason.GENERATED, + skip_description="Generated file.", + ), + ) + return oldcode + + +def _execute_transform( + transformer: Union[Codemod, Type[Codemod]], + filename: str, + config: ExecutionConfig, + original_scratch: Dict[str, object], + codemod_args: Optional[Dict[str, object]], + repo_manager: Optional[FullRepoManager], +) -> ExecutionResult: + warnings: list[str] = [] + try: + oldcode = _check_for_skip(filename, config) + if isinstance(oldcode, ExecutionResult): + return oldcode + + transformer_instance = _instantiate_transformer( + transformer, + config.repo_root or ".", + filename, + original_scratch, + codemod_args or {}, + repo_manager, ) # Run the transform, bail if we failed or if we aren't formatting code @@ -281,55 +323,26 @@ def _execute_transform( # noqa: C901 else PartialParserConfig() ), ) - output_tree = transformer.transform_module(input_tree) + output_tree = transformer_instance.transform_module(input_tree) newcode = output_tree.bytes encoding = output_tree.encoding - except KeyboardInterrupt: - return ExecutionResult( - filename=filename, changed=False, transform_result=TransformExit() - ) + warnings.extend(transformer_instance.context.warnings) except SkipFile as ex: + warnings.extend(transformer_instance.context.warnings) return ExecutionResult( filename=filename, changed=False, transform_result=TransformSkip( skip_reason=SkipReason.OTHER, skip_description=str(ex), - warning_messages=transformer.context.warnings, - ), - ) - except Exception as ex: - return ExecutionResult( - filename=filename, - changed=False, - transform_result=TransformFailure( - error=ex, - traceback_str=traceback.format_exc(), - warning_messages=transformer.context.warnings, + warning_messages=warnings, ), ) # Call formatter if needed, but only if we actually changed something in this # file if config.format_code and newcode != oldcode: - try: - newcode = invoke_formatter(config.formatter_args, newcode) - except KeyboardInterrupt: - return ExecutionResult( - filename=filename, - changed=False, - transform_result=TransformExit(), - ) - except Exception as ex: - return ExecutionResult( - filename=filename, - changed=False, - transform_result=TransformFailure( - error=ex, - traceback_str=traceback.format_exc(), - warning_messages=transformer.context.warnings, - ), - ) + newcode = invoke_formatter(config.formatter_args, newcode) # Format as unified diff if needed, otherwise save it back changed = oldcode != newcode @@ -352,13 +365,14 @@ def _execute_transform( # noqa: C901 return ExecutionResult( filename=filename, changed=changed, - transform_result=TransformSuccess( - warning_messages=transformer.context.warnings, code=newcode - ), + transform_result=TransformSuccess(warning_messages=warnings, code=newcode), ) + except KeyboardInterrupt: return ExecutionResult( - filename=filename, changed=False, transform_result=TransformExit() + filename=filename, + changed=False, + transform_result=TransformExit(warning_messages=warnings), ) except Exception as ex: return ExecutionResult( @@ -367,7 +381,7 @@ def _execute_transform( # noqa: C901 transform_result=TransformFailure( error=ex, 
traceback_str=traceback.format_exc(), - warning_messages=transformer.context.warnings, + warning_messages=warnings, ), ) @@ -504,15 +518,8 @@ class ParallelTransformResult: skips: int -# Unfortunate wrapper required since there is no `istarmap_unordered`... -def _execute_transform_wrap( - job: Dict[str, Any], -) -> ExecutionResult: - return _execute_transform(**job) - - def parallel_exec_transform_with_prettyprint( # noqa: C901 - transform: Codemod, + transform: Union[Codemod, Type[Codemod]], files: Sequence[str], *, jobs: Optional[int] = None, @@ -528,38 +535,49 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 blacklist_patterns: Sequence[str] = (), python_version: Optional[str] = None, repo_root: Optional[str] = None, + codemod_args: Optional[Dict[str, object]] = None, ) -> ParallelTransformResult: """ - Given a list of files and an instantiated codemod we should apply to them, - fork and apply the codemod in parallel to all of the files, including any - configured formatter. The ``jobs`` parameter controls the maximum number of - in-flight transforms, and needs to be at least 1. If not included, the number - of jobs will automatically be set to the number of CPU cores. If ``unified_diff`` - is set to a number, changes to files will be printed to stdout with - ``unified_diff`` lines of context. If it is set to ``None`` or left out, files - themselves will be updated with changes and formatting. If a - ``python_version`` is provided, then we will parse each source file using - this version. Otherwise, we will use the version of the currently executing python + Given a list of files and a codemod we should apply to them, fork and apply the + codemod in parallel to all of the files, including any configured formatter. The + ``jobs`` parameter controls the maximum number of in-flight transforms, and needs to + be at least 1. If not included, the number of jobs will automatically be set to the + number of CPU cores. If ``unified_diff`` is set to a number, changes to files will + be printed to stdout with ``unified_diff`` lines of context. If it is set to + ``None`` or left out, files themselves will be updated with changes and formatting. + If a ``python_version`` is provided, then we will parse each source file using this + version. Otherwise, we will use the version of the currently executing python binary. - A progress indicator as well as any generated warnings will be printed to stderr. - To supress the interactive progress indicator, set ``hide_progress`` to ``True``. - Files that include the generated code marker will be skipped unless the - ``include_generated`` parameter is set to ``True``. Similarly, files that match - a supplied blacklist of regex patterns will be skipped. Warnings for skipping - both blacklisted and generated files will be printed to stderr along with - warnings generated by the codemod unless ``hide_blacklisted`` and - ``hide_generated`` are set to ``True``. Files that were successfully codemodded - will not be printed to stderr unless ``show_successes`` is set to ``True``. + A progress indicator as well as any generated warnings will be printed to stderr. To + supress the interactive progress indicator, set ``hide_progress`` to ``True``. Files + that include the generated code marker will be skipped unless the + ``include_generated`` parameter is set to ``True``. Similarly, files that match a + supplied blacklist of regex patterns will be skipped. 
Warnings for skipping both + blacklisted and generated files will be printed to stderr along with warnings + generated by the codemod unless ``hide_blacklisted`` and ``hide_generated`` are set + to ``True``. Files that were successfully codemodded will not be printed to stderr + unless ``show_successes`` is set to ``True``. - To make this API possible, we take an instantiated transform. This is due to - the fact that lambdas are not pickleable and pickling functions is undefined. - This means we're implicitly relying on fork behavior on UNIX-like systems, and - this function will not work on Windows systems. To create a command-line utility - that runs on Windows, please instead see - :func:`~libcst.codemod.exec_transform_with_prettyprint`. + We take a :class:`~libcst.codemod._codemod.Codemod` class, or an instantiated + :class:`~libcst.codemod._codemod.Codemod`. In the former case, the codemod will be + instantiated for each file, with ``codemod_args`` passed in to the constructor. + Passing an already instantiated :class:`~libcst.codemod._codemod.Codemod` is + deprecated, because it leads to sharing of the + :class:`~libcst.codemod._codemod.Codemod` instance across files, which is a common + source of hard-to-track-down bugs when the :class:`~libcst.codemod._codemod.Codemod` + tracks its state on the instance. """ + if isinstance(transform, Codemod): + warn( + "Passing transformer instances to `parallel_exec_transform_with_prettyprint` " + "is deprecated and will break in a future version. " + "Please pass the transformer class instead.", + DeprecationWarning, + stacklevel=2, + ) + # Ensure that we have no duplicates, otherwise we might get race conditions # on write. files = sorted({os.path.abspath(f) for f in files}) @@ -579,6 +597,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 if total == 0: return ParallelTransformResult(successes=0, failures=0, skips=0, warnings=0) + metadata_manager: Optional[FullRepoManager] = None if repo_root is not None: # Make sure if there is a root that we have the absolute path to it. 
repo_root = os.path.abspath(repo_root) @@ -591,10 +610,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 transform.get_inherited_dependencies(), ) metadata_manager.resolve_cache() - transform.context = replace( - transform.context, - metadata_manager=metadata_manager, - ) + print("Executing codemod...", file=sys.stderr) config = ExecutionConfig( @@ -630,19 +646,24 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 failures: int = 0 warnings: int = 0 skips: int = 0 + original_scratch = ( + deepcopy(transform.context.scratch) if isinstance(transform, Codemod) else {} + ) with pool_impl(max_workers=jobs) as executor: # type: ignore - args = [ - { - "transformer": transform, - "filename": filename, - "config": config, - "scratch": transform.context.scratch, - } - for filename in files - ] try: - futures = [executor.submit(_execute_transform_wrap, arg) for arg in args] + futures = [ + executor.submit( + _execute_transform, + transformer=transform, + filename=filename, + config=config, + original_scratch=original_scratch, + codemod_args=codemod_args, + repo_manager=metadata_manager, + ) + for filename in files + ] for future in as_completed(futures): result = future.result() # Print an execution result, keep track of failures diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py index 6c106b36..e6dfdb5c 100644 --- a/libcst/tests/test_e2e.py +++ b/libcst/tests/test_e2e.py @@ -48,42 +48,45 @@ def temp_workspace() -> Generator[Path, None, None]: class ToolE2ETest(TestCase): def test_leaky_codemod(self) -> None: - with temp_workspace() as tmp: - # File to trigger codemod - example: Path = tmp / "example.py" - example.write_text("""print("Hello")""") - # File that should not be modified - other = tmp / "other.py" - other.touch() - # Just a dir named "dir.py", should be ignored - adir = tmp / "dir.py" - adir.mkdir() + for msg, command in [ + ("instantiated", PrintToPPrintCommand(CodemodContext())), + ("class", PrintToPPrintCommand), + ]: + with self.subTest(msg), temp_workspace() as tmp: + # File to trigger codemod + example: Path = tmp / "example.py" + example.write_text("""print("Hello")""") + # File that should not be modified + other = tmp / "other.py" + other.touch() + # Just a dir named "dir.py", should be ignored + adir = tmp / "dir.py" + adir.mkdir() - # Run command - command_instance = PrintToPPrintCommand(CodemodContext()) - files = gather_files(".") - result = parallel_exec_transform_with_prettyprint( - command_instance, - files, - format_code=False, - hide_progress=True, - ) + # Run command + files = gather_files(".") + result = parallel_exec_transform_with_prettyprint( + command, + files, + format_code=False, + hide_progress=True, + ) - print(result) + print(result) - # Check results - self.assertEqual(2, result.successes) - self.assertEqual(0, result.skips) - self.assertEqual(0, result.failures) - # Expect example.py to be modified - self.assertIn( - "from pprint import pprint", - example.read_text(), - "import missing in example.py", - ) - # Expect other.py to NOT be modified - self.assertNotIn( - "from pprint import pprint", - other.read_text(), - "import found in other.py", - ) + # Check results + self.assertEqual(2, result.successes) + self.assertEqual(0, result.skips) + self.assertEqual(0, result.failures) + # Expect example.py to be modified + self.assertIn( + "from pprint import pprint", + example.read_text(), + "import missing in example.py", + ) + # Expect other.py to NOT be modified + self.assertNotIn( + "from pprint import pprint", + 
other.read_text(), + "import found in other.py", + ) diff --git a/libcst/tool.py b/libcst/tool.py index ee4f3673..2a04a172 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -374,8 +374,6 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 "unified_diff", } } - command_instance = command_class(CodemodContext(), **codemod_args) - # Sepcify target version for black formatter if any(config["formatter"]) and os.path.basename(config["formatter"][0]) in ( "black", @@ -398,7 +396,7 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 print("Codemodding from stdin", file=sys.stderr) oldcode = sys.stdin.read() newcode = exec_transform_with_prettyprint( - command_instance, + command_class(CodemodContext(), **codemod_args), # type: ignore oldcode, include_generated=args.include_generated, generated_code_marker=config["generated_code_marker"], @@ -421,7 +419,7 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 files = gather_files(args.path, include_stubs=args.include_stubs) try: result = parallel_exec_transform_with_prettyprint( - command_instance, + command_class, files, jobs=args.jobs, unified_diff=args.unified_diff, @@ -436,6 +434,7 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 blacklist_patterns=config["blacklist_patterns"], python_version=args.python_version, repo_root=config["repo_root"], + codemod_args=codemod_args, ) except KeyboardInterrupt: print("Interrupted!", file=sys.stderr) From 16ed48d74bb33bb59c18ffa6f31705119044a621 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 25 May 2025 11:43:18 +0100 Subject: [PATCH 579/632] Enable support for free-threading (#1295) This PR: 1. marks the `libcst.native` module as free-threading-compatible 2. 
replaces the use of ProcessPoolExecutor with ThreadPoolExecutor if free-threaded CPython is detected at runtime --- .github/workflows/ci.yml | 56 ++++---------------------------- libcst/codemod/_cli.py | 17 +++++++--- libcst/codemod/_dummy_pool.py | 3 -- native/libcst/src/py.rs | 2 +- pyproject.toml | 61 ++++++++++++++++++----------------- 5 files changed, 52 insertions(+), 87 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67333e3c..75abc888 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,8 +13,13 @@ jobs: fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.13t"] steps: + - uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install hatch + run: pip install -U hatch - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -24,9 +29,6 @@ jobs: cache: pip cache-dependency-path: "pyproject.toml" python-version: ${{ matrix.python-version }} - - name: Install hatch - run: | - pip install -U hatch - uses: actions-rs/toolchain@v1 with: toolchain: stable @@ -44,50 +46,6 @@ jobs: hatch run coverage combine .coverage.pure hatch run coverage report - # TODO: - # merge into regular CI once hatch has support for creating environments on - # the free-threaded build: https://github.com/pypa/hatch/issues/1931 - free-threaded-tests: - name: "test (${{ matrix.os }}, 3.13t)" - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [macos-latest, ubuntu-latest, windows-latest] - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - uses: actions/setup-python@v5 - with: - cache: pip - cache-dependency-path: "pyproject.toml" - python-version: '3.13t' - - name: Build LibCST - run: | - # Install build-system.requires dependencies - pip install setuptools setuptools-scm setuptools-rust wheel - # Jupyter is annoying to install on free-threaded Python - pip install -e .[dev-without-jupyter] - - name: Native Parser Tests - # TODO: remove when native modules declare free-threaded support - env: - PYTHON_GIL: '0' - run: | - python -m coverage run -m libcst.tests - - name: Pure Parser Tests - env: - COVERAGE_FILE: .coverage.pure - LIBCST_PARSER_TYPE: pure - run: | - python -m coverage run -m libcst.tests - - name: Coverage - run: | - python -m coverage combine .coverage.pure - python -m coverage report - - # Run linters lint: runs-on: ubuntu-latest @@ -139,7 +97,7 @@ jobs: - name: Install hatch run: pip install -U hatch - uses: ts-graphviz/setup-graphviz@v2 - - run: hatch run docs + - run: hatch run docs:docs - name: Archive Docs uses: actions/upload-artifact@v4 with: diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 71143d69..178cabcb 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -8,18 +8,19 @@ Provides helpers for CLI interaction. 
""" import difflib +import functools import os.path import re import subprocess import sys import time import traceback -from concurrent.futures import as_completed, Executor, ProcessPoolExecutor +from concurrent.futures import as_completed, Executor from copy import deepcopy from dataclasses import dataclass from multiprocessing import cpu_count from pathlib import Path -from typing import AnyStr, cast, Dict, List, Optional, Sequence, Type, Union +from typing import AnyStr, Callable, cast, Dict, List, Optional, Sequence, Type, Union from warnings import warn from libcst import parse_module, PartialParserConfig @@ -624,14 +625,20 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 python_version=python_version, ) - pool_impl: type[Executor] + pool_impl: Callable[[], Executor] if total == 1 or jobs == 1: # Simple case, we should not pay for process overhead. # Let's just use a dummy synchronous executor. jobs = 1 pool_impl = DummyExecutor + elif getattr(sys, "_is_gil_enabled", lambda: False)(): # pyre-ignore[16] + from concurrent.futures import ThreadPoolExecutor + + pool_impl = functools.partial(ThreadPoolExecutor, max_workers=jobs) else: - pool_impl = ProcessPoolExecutor + from concurrent.futures import ProcessPoolExecutor + + pool_impl = functools.partial(ProcessPoolExecutor, max_workers=jobs) # Warm the parser, pre-fork. parse_module( "", @@ -650,7 +657,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 deepcopy(transform.context.scratch) if isinstance(transform, Codemod) else {} ) - with pool_impl(max_workers=jobs) as executor: # type: ignore + with pool_impl() as executor: # type: ignore try: futures = [ executor.submit( diff --git a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py index 34c911bd..aa23a7d4 100644 --- a/libcst/codemod/_dummy_pool.py +++ b/libcst/codemod/_dummy_pool.py @@ -22,9 +22,6 @@ class DummyExecutor(Executor): Synchronous dummy `concurrent.futures.Executor` analogue. 
""" - def __init__(self, max_workers: Optional[int] = None) -> None: - pass - def submit( self, fn: Callable[Params, Return], diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs index bd7dfe6d..57da11e7 100644 --- a/native/libcst/src/py.rs +++ b/native/libcst/src/py.rs @@ -6,7 +6,7 @@ use crate::nodes::traits::py::TryIntoPy; use pyo3::prelude::*; -#[pymodule] +#[pymodule(gil_used = false)] #[pyo3(name = "native")] pub fn libcst_native(_py: Python, m: &Bound) -> PyResult<()> { #[pyfn(m)] diff --git a/pyproject.toml b/pyproject.toml index 6bf33d40..bdeb4660 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,33 +18,9 @@ classifiers = [ "Typing :: Typed", ] requires-python = ">=3.9" -dependencies = ["pyyaml>=5.2"] - -[project.optional-dependencies] -dev = [ - "libcst[dev-without-jupyter]", - "jupyter>=1.0.0", - "nbsphinx>=0.4.2", -] -dev-without-jupyter = [ - "black==25.1.0", - "coverage[toml]>=4.5.4", - "build>=0.10.0", - "fixit==2.1.0", - "flake8==7.2.0", - "Sphinx>=5.1.1", - "hypothesis>=4.36.0", - "hypothesmith>=0.0.4", - "maturin>=1.7.0,<1.8", - "prompt-toolkit>=2.0.9", - "pyre-check==0.9.18; platform_system != 'Windows'", - "setuptools_scm>=6.0.1", - "sphinx-rtd-theme>=0.4.3", - "ufmt==2.8.0", - "usort==1.0.8.post1", - "setuptools-rust>=1.5.2", - "slotscheck>=0.7.1", - "jinja2==3.1.6", +dependencies = [ + "pyyaml>=5.2; python_version < '3.13'", + "pyyaml-ft; python_version >= '3.13'", ] [project.urls] @@ -63,10 +39,26 @@ show_missing = true skip_covered = true [tool.hatch.envs.default] -features = ["dev"] +installer = "uv" +dependencies = [ + "black==25.1.0", + "coverage[toml]>=4.5.4", + "build>=0.10.0", + "fixit==2.1.0", + "flake8==7.2.0", + "hypothesis>=4.36.0", + "hypothesmith>=0.0.4", + "maturin>=1.7.0,<1.8", + "prompt-toolkit>=2.0.9", + "pyre-check==0.9.18; platform_system != 'Windows'", + "setuptools_scm>=6.0.1", + "ufmt==2.8.0", + "usort==1.0.8.post1", + "setuptools-rust>=1.5.2", + "slotscheck>=0.7.1", +] [tool.hatch.envs.default.scripts] -docs = "sphinx-build -ab html docs/source docs/build" fixtures = ["python scripts/regenerate-fixtures.py", "git diff --exit-code"] format = "ufmt format libcst scripts" lint = [ @@ -78,6 +70,17 @@ lint = [ test = ["python --version", "python -m coverage run -m libcst.tests"] typecheck = ["pyre --version", "pyre check"] +[tool.hatch.envs.docs] +extra-dependencies = [ + "Sphinx>=5.1.1", + "sphinx-rtd-theme>=0.4.3", + "jupyter>=1.0.0", + "nbsphinx>=0.4.2", + "jinja2==3.1.6", +] +[tool.hatch.envs.docs.scripts] +docs = "sphinx-build -ab html docs/source docs/build" + [tool.slotscheck] exclude-modules = '^libcst\.(testing|tests)' From c224665ed767f2b3c0be0a4c16a14c54be65842d Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 25 May 2025 11:44:16 +0100 Subject: [PATCH 580/632] ci: start building cp313t wheels (#1333) Closes #1242. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index bdeb4660..27dbb710 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,6 +98,7 @@ skip = [ "*-musllinux_s390x", "*-musllinux_armv7l", ] +enable = ["cpython-freethreading"] [tool.cibuildwheel.linux] environment-pass = ["LIBCST_NO_LOCAL_SCHEME"] From b560ae815c64bd83bfc635cbaac4fa427de933a7 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 25 May 2025 20:13:12 +0100 Subject: [PATCH 581/632] Threadpool should be used if GIL is disabled. 
(#1335) --- libcst/codemod/_cli.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 178cabcb..0cdec103 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -631,11 +631,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 # Let's just use a dummy synchronous executor. jobs = 1 pool_impl = DummyExecutor - elif getattr(sys, "_is_gil_enabled", lambda: False)(): # pyre-ignore[16] - from concurrent.futures import ThreadPoolExecutor - - pool_impl = functools.partial(ThreadPoolExecutor, max_workers=jobs) - else: + elif getattr(sys, "_is_gil_enabled", lambda: True)(): # pyre-ignore[16] from concurrent.futures import ProcessPoolExecutor pool_impl = functools.partial(ProcessPoolExecutor, max_workers=jobs) @@ -648,6 +644,10 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 else PartialParserConfig() ), ) + else: + from concurrent.futures import ThreadPoolExecutor + + pool_impl = functools.partial(ThreadPoolExecutor, max_workers=jobs) successes: int = 0 failures: int = 0 From 3dc2289bf6e21f36fb461e3ed4cf792994a87bb1 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 26 May 2025 08:40:54 +0100 Subject: [PATCH 582/632] codegen: Support pipe syntax for Union types (#1336) From 3.14 onwards, we'll get `foo | bar` instead of `typing.Union[foo, bar]` as the annotation for union types (including optional). This PR prepares the codegen script for this. --- libcst/codegen/gen_matcher_classes.py | 116 ++++++++++++++++++++- libcst/codegen/tests/test_codegen_clean.py | 68 ++++++++++-- 2 files changed, 174 insertions(+), 10 deletions(-) diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index b7940f97..7f3cb6f8 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -16,6 +16,109 @@ CLASS_RE = r"" OPTIONAL_RE = r"typing\.Union\[([^,]*?), NoneType]" +class NormalizeUnions(cst.CSTTransformer): + """ + Convert a binary operation with | operators into a Union type. + For example, converts `foo | bar | baz` into `typing.Union[foo, bar, baz]`. + Special case: converts `foo | None` or `None | foo` into `typing.Optional[foo]`. + Also flattens nested typing.Union types. 
+ """ + + def leave_Subscript( + self, original_node: cst.Subscript, updated_node: cst.Subscript + ) -> cst.Subscript: + # Check if this is a typing.Union + if ( + isinstance(updated_node.value, cst.Attribute) + and isinstance(updated_node.value.value, cst.Name) + and updated_node.value.attr.value == "Union" + and updated_node.value.value.value == "typing" + ): + # Collect all operands from any nested Unions + operands: List[cst.BaseExpression] = [] + for slc in updated_node.slice: + if not isinstance(slc.slice, cst.Index): + continue + value = slc.slice.value + # If this is a nested Union, add its elements + if ( + isinstance(value, cst.Subscript) + and isinstance(value.value, cst.Attribute) + and isinstance(value.value.value, cst.Name) + and value.value.attr.value == "Union" + and value.value.value.value == "typing" + ): + operands.extend( + nested_slc.slice.value + for nested_slc in value.slice + if isinstance(nested_slc.slice, cst.Index) + ) + else: + operands.append(value) + + # flatten operands into a Union type + return cst.Subscript( + cst.Attribute(cst.Name("typing"), cst.Name("Union")), + [cst.SubscriptElement(cst.Index(operand)) for operand in operands], + ) + return updated_node + + def leave_BinaryOperation( + self, original_node: cst.BinaryOperation, updated_node: cst.BinaryOperation + ) -> Union[cst.BinaryOperation, cst.Subscript]: + if not updated_node.operator.deep_equals(cst.BitOr()): + return updated_node + + def flatten_binary_op(node: cst.BaseExpression) -> List[cst.BaseExpression]: + """Flatten a binary operation tree into a list of operands.""" + if not isinstance(node, cst.BinaryOperation): + # If it's a Union type, extract its elements + if ( + isinstance(node, cst.Subscript) + and isinstance(node.value, cst.Attribute) + and isinstance(node.value.value, cst.Name) + and node.value.attr.value == "Union" + and node.value.value.value == "typing" + ): + return [ + slc.slice.value + for slc in node.slice + if isinstance(slc.slice, cst.Index) + ] + return [node] + if not node.operator.deep_equals(cst.BitOr()): + return [node] + + left_operands = flatten_binary_op(node.left) + right_operands = flatten_binary_op(node.right) + return left_operands + right_operands + + # Flatten the binary operation tree into a list of operands + operands = flatten_binary_op(updated_node) + + # Check for Optional case (None in union) + none_count = sum( + 1 for op in operands if isinstance(op, cst.Name) and op.value == "None" + ) + if none_count == 1 and len(operands) == 2: + # This is an Optional case - find the non-None operand + non_none = next( + op + for op in operands + if not (isinstance(op, cst.Name) and op.value == "None") + ) + return cst.Subscript( + cst.Attribute(cst.Name("typing"), cst.Name("Optional")), + [cst.SubscriptElement(cst.Index(non_none))], + ) + + # Regular Union case + return cst.Subscript( + cst.Attribute(cst.Name("typing"), cst.Name("Union")), + [cst.SubscriptElement(cst.Index(operand)) for operand in operands], + ) + + class CleanseFullTypeNames(cst.CSTTransformer): def leave_Call( self, original_node: cst.Call, updated_node: cst.Call @@ -357,7 +460,9 @@ def _get_clean_type_from_subscript( elif isinstance(inner_type, (cst.Name, cst.SimpleString)): clean_inner_type = _get_clean_type_from_expression(aliases, inner_type) else: - raise Exception("Logic error, unexpected type in Sequence!") + raise Exception( + f"Logic error, unexpected type in Sequence: {type(inner_type)}!" 
+ ) return _get_wrapped_union_type( typecst.deep_replace(inner_type, clean_inner_type), @@ -386,9 +491,12 @@ def _get_clean_type_and_aliases( typestr = re.sub(OPTIONAL_RE, r"typing.Optional[\1]", typestr) # Now, parse the expression with LibCST. - cleanser = CleanseFullTypeNames() + typecst = parse_expression(typestr) - typecst = typecst.visit(cleanser) + typecst = typecst.visit(NormalizeUnions()) + assert isinstance(typecst, cst.BaseExpression) + typecst = typecst.visit(CleanseFullTypeNames()) + assert isinstance(typecst, cst.BaseExpression) aliases: List[Alias] = [] # Now, convert the type to allow for MetadataMatchType and MatchIfTrue values. @@ -397,7 +505,7 @@ def _get_clean_type_and_aliases( elif isinstance(typecst, (cst.Name, cst.SimpleString)): clean_type = _get_clean_type_from_expression(aliases, typecst) else: - raise Exception("Logic error, unexpected top level type!") + raise Exception(f"Logic error, unexpected top level type: {type(typecst)}!") # Now, insert OneOf/AllOf and MatchIfTrue into unions so we can typecheck their usage. # This allows us to put OneOf[SomeType] or MatchIfTrue[cst.SomeType] into any diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index 7b71ba36..5ee5903d 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -3,6 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import difflib import os import os.path @@ -20,12 +21,20 @@ class TestCodegenClean(UnitTest): new_code: str, module_name: str, ) -> None: - self.assertTrue( - old_code == new_code, - f"{module_name} needs new codegen, see " - + "`python -m libcst.codegen.generate --help` " - + "for instructions, or run `python -m libcst.codegen.generate all`", - ) + if old_code != new_code: + diff = difflib.unified_diff( + old_code.splitlines(keepends=True), + new_code.splitlines(keepends=True), + fromfile="old_code", + tofile="new_code", + ) + diff_str = "".join(diff) + self.fail( + f"{module_name} needs new codegen, see " + + "`python -m libcst.codegen.generate --help` " + + "for instructions, or run `python -m libcst.codegen.generate all`. " + + f"Diff:\n{diff_str}" + ) def test_codegen_clean_visitor_functions(self) -> None: """ @@ -123,3 +132,50 @@ class TestCodegenClean(UnitTest): # Now that we've done simple codegen, verify that it matches. self.assert_code_matches(old_code, new_code, "libcst.matchers._return_types") + + def test_normalize_unions(self) -> None: + """ + Verifies that NormalizeUnions correctly converts binary operations with | + into Union types, with special handling for Optional cases. 
+ """ + import libcst as cst + from libcst.codegen.gen_matcher_classes import NormalizeUnions + + def assert_transforms_to(input_code: str, expected_code: str) -> None: + input_cst = cst.parse_expression(input_code) + expected_cst = cst.parse_expression(expected_code) + + result = input_cst.visit(NormalizeUnions()) + assert isinstance( + result, cst.BaseExpression + ), f"Expected BaseExpression, got {type(result)}" + + result_code = cst.Module(body=()).code_for_node(result) + expected_code_str = cst.Module(body=()).code_for_node(expected_cst) + + self.assertEqual( + result_code, + expected_code_str, + f"Expected {expected_code_str}, got {result_code}", + ) + + # Test regular union case + assert_transforms_to("foo | bar | baz", "typing.Union[foo, bar, baz]") + + # Test Optional case (None on right) + assert_transforms_to("foo | None", "typing.Optional[foo]") + + # Test Optional case (None on left) + assert_transforms_to("None | foo", "typing.Optional[foo]") + + # Test case with more than 2 operands including None (should remain Union) + assert_transforms_to("foo | bar | None", "typing.Union[foo, bar, None]") + + # Flatten existing Union types + assert_transforms_to( + "typing.Union[foo, typing.Union[bar, baz]]", "typing.Union[foo, bar, baz]" + ) + # Merge two kinds of union types + assert_transforms_to( + "foo | typing.Union[bar, baz]", "typing.Union[foo, bar, baz]" + ) From 50032882d0004df6332c8e67e1d8f94105893fef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 08:41:36 +0100 Subject: [PATCH 583/632] build(deps): bump peg from 0.8.4 to 0.8.5 in /native (#1340) Bumps [peg](https://github.com/kevinmehall/rust-peg) from 0.8.4 to 0.8.5. - [Release notes](https://github.com/kevinmehall/rust-peg/releases) - [Commits](https://github.com/kevinmehall/rust-peg/compare/0.8.4...0.8.5) --- updated-dependencies: - dependency-name: peg dependency-version: 0.8.5 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 12 ++++++------ native/libcst/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index bf9b91db..a5b4f13f 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -431,9 +431,9 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "peg" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "295283b02df346d1ef66052a757869b2876ac29a6bb0ac3f5f7cd44aebe40e8f" +checksum = "9928cfca101b36ec5163e70049ee5368a8a1c3c6efc9ca9c5f9cc2f816152477" dependencies = [ "peg-macros", "peg-runtime", @@ -441,9 +441,9 @@ dependencies = [ [[package]] name = "peg-macros" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdad6a1d9cf116a059582ce415d5f5566aabcd4008646779dab7fdc2a9a9d426" +checksum = "6298ab04c202fa5b5d52ba03269fb7b74550b150323038878fe6c372d8280f71" dependencies = [ "peg-runtime", "proc-macro2", @@ -452,9 +452,9 @@ dependencies = [ [[package]] name = "peg-runtime" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a" +checksum = "132dca9b868d927b35b5dd728167b2dee150eb1ad686008fc71ccb298b776fca" [[package]] name = "plotters" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 4182a6d1..d639af1f 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -38,7 +38,7 @@ trace = ["peg/trace"] paste = "1.0.15" pyo3 = { version = "0.25", optional = true } thiserror = "2.0.12" -peg = "0.8.4" +peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.1" memchr = "2.7.4" From 3389d4e231ce47e0ca9b9978e5a479546819194a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 08:42:06 +0100 Subject: [PATCH 584/632] build(deps): bump quote from 1.0.37 to 1.0.40 in /native (#1341) Bumps [quote](https://github.com/dtolnay/quote) from 1.0.37 to 1.0.40. - [Release notes](https://github.com/dtolnay/quote/releases) - [Commits](https://github.com/dtolnay/quote/compare/1.0.37...1.0.40) --- updated-dependencies: - dependency-name: quote dependency-version: 1.0.40 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index a5b4f13f..52f71ec1 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -563,9 +563,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.37" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] From 356ac005867b79f50284b1b103c6c397b376f791 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 08:42:40 +0100 Subject: [PATCH 585/632] build(deps): bump syn from 2.0.87 to 2.0.101 in /native (#1338) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.87 to 2.0.101. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.87...2.0.101) --- updated-dependencies: - dependency-name: syn dependency-version: 2.0.101 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 52f71ec1..97ae6665 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -359,7 +359,7 @@ name = "libcst_derive" version = "1.7.0" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.101", "trybuild", ] @@ -492,9 +492,9 @@ checksum = "31114a898e107c51bb1609ffaf55a0e011cf6a4d7f1170d0015a165082c0338b" [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -545,7 +545,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -558,7 +558,7 @@ dependencies = [ "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -670,7 +670,7 @@ checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -707,9 +707,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" dependencies = [ "proc-macro2", "quote", @@ -754,7 +754,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] From efae53d3652f3683341679b0105ae11ad29f4d0b Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Mon, 26 May 2025 03:02:44 -0700 Subject: [PATCH 586/632] Run CI tests on 3.14 (#1331) * Run CI tests on 3.14 * noop commit to retrigger CI --------- Co-authored-by: Zsolt Dollenstein --- .github/workflows/ci.yml | 11 ++++++++++- 1 file 
changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 75abc888..19329a87 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,15 @@ jobs: fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.13t"] + python-version: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + - "3.13" + - "3.13t" + - "3.14" + - "3.14t" steps: - uses: actions/setup-python@v5 with: @@ -29,6 +37,7 @@ jobs: cache: pip cache-dependency-path: "pyproject.toml" python-version: ${{ matrix.python-version }} + allow-prereleases: true - uses: actions-rs/toolchain@v1 with: toolchain: stable From 1e67a9bb840b07478661925d46fdfc13dd2b05ed Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 27 May 2025 11:44:16 +0100 Subject: [PATCH 587/632] Build 3.14 wheels for testing (#1345) * Build 3.14 wheels for testing * use cibuildwheel 3 --- .github/workflows/build.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 429e4b82..1a6e728b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,8 +42,11 @@ jobs: if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Enable building wheels for pre-release CPython versions + if: github.event_name != 'release' + run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v2.23.3 + uses: pypa/cibuildwheel@v3.0.0b2 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From ae64e0d534b58bdc89f6bfa41513a24873063b39 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 27 May 2025 14:15:49 +0100 Subject: [PATCH 588/632] ci: fix zizmor warnings (#1347) --- .github/workflows/build.yml | 4 +--- .github/workflows/ci.yml | 2 ++ .github/workflows/pypi_upload.yml | 5 ++--- zizmor.yml | 5 +++++ 4 files changed, 10 insertions(+), 6 deletions(-) create mode 100644 zizmor.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1a6e728b..dc2e54b3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -30,10 +30,8 @@ jobs: persist-credentials: false - uses: actions/setup-python@v5 with: - cache: pip - cache-dependency-path: "pyproject.toml" python-version: "3.12" - - uses: dtolnay/rust-toolchain@stable # zizmor: ignore[unpinned-uses] + - uses: dtolnay/rust-toolchain@stable - name: Set MACOSX_DEPLOYMENT_TARGET for Intel MacOS if: matrix.os == 'macos-13' run: >- diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 19329a87..409f7df1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,8 @@ on: - main pull_request: +permissions: {} + jobs: test: runs-on: ${{ matrix.os }} diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index f03151f7..e8798c6e 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -32,19 +32,18 @@ jobs: merge-multiple: true - uses: actions/setup-python@v5 with: - cache: pip - cache-dependency-path: "pyproject.toml" python-version: "3.10" - name: Install hatch run: pip install -U hatch - name: Build a source tarball env: LIBCST_NO_LOCAL_SCHEME: 1 + OUTDIR: ${{ steps.download.outputs.download-path }} run: >- hatch run python -m build --sdist - --outdir ${{ steps.download.outputs.download-path }} + --outdir "$OUTDIR" - name: 
Publish distribution 📦 to Test PyPI if: github.event_name == 'push' uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/zizmor.yml b/zizmor.yml new file mode 100644 index 00000000..faf5a0f0 --- /dev/null +++ b/zizmor.yml @@ -0,0 +1,5 @@ +rules: + unpinned-uses: + config: + policies: + "*": ref-pin \ No newline at end of file From 18d4f6aded907bd11b683fa54dad32ca04f84f75 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 27 May 2025 15:02:58 +0100 Subject: [PATCH 589/632] bump version to 1.8.0 (#1348) --- CHANGELOG.md | 21 +++++++++++++++++++++ MAINTAINERS.md | 11 +++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 5 files changed, 37 insertions(+), 5 deletions(-) create mode 100644 MAINTAINERS.md diff --git a/CHANGELOG.md b/CHANGELOG.md index b0ade209..bb50a57a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,24 @@ +# 1.8.0 - 2025-05-27 + +## Added +* Allow configuring empty formatter lists in codemod CLI by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1319 +* Publish several new binary wheels + * macos intel by @hadialqattan in https://github.com/Instagram/LibCST/pull/1316 + * windows arm64 by @zsol in https://github.com/Instagram/LibCST/pull/1304 + * 3.13 CPython free-threaded by @zsol in https://github.com/Instagram/LibCST/pull/1333 + * (only on [test.pypi.org](https://test.pypi.org/project/libcst/#history)) 3.14 and 3.14 CPython free-threaded by @amyreese and @zsol in https://github.com/Instagram/LibCST/pull/1345 and https://github.com/Instagram/LibCST/pull/1331 +* Enable support for free-threaded CPython by @zsol in https://github.com/Instagram/LibCST/pull/1295 and https://github.com/Instagram/LibCST/pull/1335 + +## Updated +* update pyo3 to 0.25 by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1324 +* Replace multiprocessing with ProcessPoolExecutor by @zsol in https://github.com/Instagram/LibCST/pull/1294 +* Support pipe syntax for Union types in codegen by @zsol in https://github.com/Instagram/LibCST/pull/1336 + +## New Contributors +* @hadialqattan made their first contribution in https://github.com/Instagram/LibCST/pull/1316 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.7.0...v1.8.0 + # 1.7.0 - 2025-03-13 ## Added diff --git a/MAINTAINERS.md b/MAINTAINERS.md new file mode 100644 index 00000000..eb94a2a0 --- /dev/null +++ b/MAINTAINERS.md @@ -0,0 +1,11 @@ +# How to make a new release + +1. Add a new entry to `CHANGELOG.md` (I normally use the [new release page](https://github.com/Instagram/LibCST/releases/new) to generate a changelog, then manually group) + 1. Follow the existing format: `Fixed`, `Added`, `Updated`, `Deprecated`, `Removed`, `New Contributors` sections, and the full changelog link at the bottom. + 1. Mention only user-visible changes - improvements to CI, tests, or development workflow aren't noteworthy enough + 1. Version bumps are generally not worth mentioning with some notable exceptions (like pyo3) + 1. Group related PRs into one bullet point if it makes sense +2. manually bump versions in `Cargo.toml` files in the repo +3. make a new PR with the above changes, get it reviewed and landed +4. make a new release on Github, create a new tag on publish, and copy the contents of the changelog entry in there +5. 
after publishing, check out the repo at the new tag, and run `cd native; cargo +nightly publish -Z package-workspace -p libcst_derive -p libcst` \ No newline at end of file diff --git a/native/Cargo.lock b/native/Cargo.lock index 97ae6665..d09b42a5 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -338,7 +338,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.7.0" +version = "1.8.0" dependencies = [ "annotate-snippets", "criterion", @@ -356,7 +356,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.7.0" +version = "1.8.0" dependencies = [ "quote", "syn 2.0.101", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index d639af1f..e9c7411c 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.7.0" +version = "1.8.0" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.1" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.6.0" } +libcst_derive = { path = "../libcst_derive", version = "1.8.0" } [dev-dependencies] criterion = { version = "0.5.1", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 8d5fd2ff..78c8554f 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.7.0" +version = "1.8.0" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From 482a2e5f0997273e3fb272346dac275c04e84807 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Jun 2025 03:59:34 +0100 Subject: [PATCH 590/632] build(deps): bump pypa/cibuildwheel from 3.0.0b2 to 3.0.0b4 (#1349) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.0.0b2 to 3.0.0b4. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v3.0.0b2...v3.0.0b4) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-version: 3.0.0b4 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index dc2e54b3..3306be71 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: github.event_name != 'release' run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v3.0.0b2 + uses: pypa/cibuildwheel@v3.0.0b4 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 935415a35a88fc4f72793b651f1786d714188a6a Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 3 Jun 2025 22:38:19 -0700 Subject: [PATCH 591/632] ci: stop using actions-rs actions (#1352) --- .github/workflows/ci.yml | 30 +++++++----------------------- 1 file changed, 7 insertions(+), 23 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 409f7df1..87a2ea11 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,9 +40,7 @@ jobs: cache-dependency-path: "pyproject.toml" python-version: ${{ matrix.python-version }} allow-prereleases: true - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable + - uses: dtolnay/rust-toolchain@stable - name: Build LibCST run: hatch -vv env create - name: Native Parser Tests @@ -135,26 +133,14 @@ jobs: with: python-version: ${{ matrix.python-version }} - name: test - uses: actions-rs/cargo@v1 - with: - command: test - args: --manifest-path=native/Cargo.toml --release + run: cargo test --manifest-path=native/Cargo.toml --release - name: test without python if: matrix.os == 'ubuntu-latest' - uses: actions-rs/cargo@v1 - with: - command: test - args: --manifest-path=native/Cargo.toml --release --no-default-features + run: cargo test --manifest-path=native/Cargo.toml --release --no-default-features - name: clippy - uses: actions-rs/clippy-check@v1 - with: - token: ${{ secrets.GITHUB_TOKEN }} - args: --manifest-path=native/Cargo.toml --all-features + run: cargo clippy --manifest-path=native/Cargo.toml --all-targets --all-features - name: compile-benchmarks - uses: actions-rs/cargo@v1 - with: - command: bench - args: --manifest-path=native/Cargo.toml --no-run + run: cargo bench --manifest-path=native/Cargo.toml --no-run rustfmt: name: Rustfmt @@ -167,7 +153,5 @@ jobs: with: components: rustfmt - run: rustup component add rustfmt - - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --all --manifest-path=native/Cargo.toml -- --check + - name: format + run: cargo fmt --all --manifest-path=native/Cargo.toml -- --check From e12eef5810c6e21cf2ef3703b0eae1d1042e4452 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Wed, 4 Jun 2025 14:02:21 -0700 Subject: [PATCH 592/632] add helper to convert nodes to matchers (#1351) * add helper to convert nodes to matchers * suppress type error --- libcst/helpers/matchers.py | 45 +++++++++++++++++++++++ libcst/helpers/tests/test_matchers.py | 53 +++++++++++++++++++++++++++ 2 files changed, 98 insertions(+) create mode 100644 libcst/helpers/matchers.py create mode 100644 libcst/helpers/tests/test_matchers.py diff --git a/libcst/helpers/matchers.py b/libcst/helpers/matchers.py new file mode 100644 index 00000000..e641c43c --- /dev/null +++ b/libcst/helpers/matchers.py @@ -0,0 +1,45 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# + +from dataclasses import fields, is_dataclass, MISSING + +from libcst import matchers +from libcst._nodes.base import CSTNode + + +def node_to_matcher( + node: CSTNode, *, match_syntactic_trivia: bool = False +) -> matchers.BaseMatcherNode: + """Convert a concrete node to a matcher.""" + if not is_dataclass(node): + raise ValueError(f"{node} is not a CSTNode") + + attrs = {} + for field in fields(node): + name = field.name + child = getattr(node, name) + if not match_syntactic_trivia and field.name.startswith("whitespace"): + # Not all nodes have whitespace fields, some have multiple, but they all + # start with whitespace* + child = matchers.DoNotCare() + elif field.default is not MISSING and child == field.default: + child = matchers.DoNotCare() + # pyre-ignore[29]: Union[MISSING_TYPE, ...] is not a function. + elif field.default_factory is not MISSING and child == field.default_factory(): + child = matchers.DoNotCare() + elif isinstance(child, (list, tuple)): + child = type(child)( + node_to_matcher(item, match_syntactic_trivia=match_syntactic_trivia) + for item in child + ) + elif hasattr(matchers, type(child).__name__): + child = node_to_matcher( + child, match_syntactic_trivia=match_syntactic_trivia + ) + attrs[name] = child + + matcher = getattr(matchers, type(node).__name__) + return matcher(**attrs) diff --git a/libcst/helpers/tests/test_matchers.py b/libcst/helpers/tests/test_matchers.py new file mode 100644 index 00000000..3f2b9b47 --- /dev/null +++ b/libcst/helpers/tests/test_matchers.py @@ -0,0 +1,53 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+# + +from libcst import parse_expression, parse_statement +from libcst.helpers.matchers import node_to_matcher +from libcst.matchers import matches +from libcst.testing.utils import data_provider, UnitTest + + +class MatchersTest(UnitTest): + @data_provider( + ( + ('"some string"',), + ("call(some, **kwargs)",), + ("a[b.c]",), + ("[1 for _ in range(99) if False]",), + ) + ) + def test_reflexive_expressions(self, code: str) -> None: + node = parse_expression(code) + matcher = node_to_matcher(node) + self.assertTrue(matches(node, matcher)) + + @data_provider( + ( + ("def foo(a) -> None: pass",), + ("class F: ...",), + ("foo: bar",), + ) + ) + def test_reflexive_statements(self, code: str) -> None: + node = parse_statement(code) + matcher = node_to_matcher(node) + self.assertTrue(matches(node, matcher)) + + def test_whitespace(self) -> None: + code_ws = parse_expression("(foo , bar )") + code = parse_expression("(foo,bar)") + self.assertTrue( + matches( + code, + node_to_matcher(code_ws), + ) + ) + self.assertFalse( + matches( + code, + node_to_matcher(code_ws, match_syntactic_trivia=True), + ) + ) From ca1f81f0496a6382bce5537fcbea2f9d3610d652 Mon Sep 17 00:00:00 2001 From: zaicruvoir1rominet <105832258+zaicruvoir1rominet@users.noreply.github.com> Date: Sat, 7 Jun 2025 10:53:44 +0200 Subject: [PATCH 593/632] Avoid raising bare Exception (#1168) * Keep old exception messages (avoid breaking-changes for users relying on exception messages) * Move ``get_expected_str`` out of _exceptions.py, where it does not belong, to its own file in _parser/_parsing_check.py --- libcst/__init__.py | 3 +- libcst/_exceptions.py | 47 ++-------------- libcst/_nodes/base.py | 9 +-- libcst/_nodes/expression.py | 8 ++- libcst/_nodes/statement.py | 14 ++--- libcst/_nodes/tests/test_funcdef.py | 4 +- libcst/_nodes/tests/test_namedexpr.py | 4 +- libcst/_nodes/tests/test_removal_behavior.py | 2 +- libcst/_parser/_parsing_check.py | 53 ++++++++++++++++++ libcst/_parser/base_parser.py | 10 +--- libcst/_parser/conversions/expression.py | 56 +++++++++++++++---- libcst/_parser/conversions/params.py | 13 +++-- libcst/_parser/conversions/statement.py | 29 ++++++---- libcst/_parser/grammar.py | 8 +-- libcst/_parser/parso/pgen2/generator.py | 2 +- libcst/_parser/parso/python/tokenize.py | 15 ++--- libcst/_parser/production_decorator.py | 2 +- libcst/_parser/py_whitespace_parser.py | 19 +++++-- libcst/codegen/gen_matcher_classes.py | 18 +++--- libcst/codemod/_cli.py | 4 +- libcst/codemod/_codemod.py | 4 +- libcst/codemod/_visitor.py | 8 +-- .../commands/convert_format_to_fstring.py | 24 +++++--- .../convert_percent_format_to_fstring.py | 4 +- .../codemod/commands/fix_pyre_directives.py | 5 +- libcst/codemod/commands/rename.py | 6 +- libcst/codemod/visitors/_add_imports.py | 10 ++-- libcst/codemod/visitors/_remove_imports.py | 11 ++-- libcst/display/text.py | 4 +- libcst/helpers/_template.py | 12 ++-- libcst/helpers/common.py | 4 +- libcst/helpers/expression.py | 2 +- libcst/helpers/module.py | 4 +- libcst/matchers/_matcher_base.py | 48 ++++++++-------- libcst/metadata/base_provider.py | 2 +- libcst/metadata/full_repo_manager.py | 2 +- .../tests/test_type_inference_provider.py | 10 +--- libcst/metadata/type_inference_provider.py | 30 +++++----- libcst/tool.py | 10 ++-- 39 files changed, 305 insertions(+), 215 deletions(-) create mode 100644 libcst/_parser/_parsing_check.py diff --git a/libcst/__init__.py b/libcst/__init__.py index 2a8e47b3..4e82bc9e 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -4,7 +4,7 @@ # LICENSE 
file in the root directory of this source tree. from libcst._batched_visitor import BatchableCSTVisitor, visit_batched -from libcst._exceptions import MetadataException, ParserSyntaxError +from libcst._exceptions import CSTLogicError, MetadataException, ParserSyntaxError from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel from libcst._metadata_dependent import MetadataDependent @@ -242,6 +242,7 @@ __all__ = [ "CSTVisitorT", "FlattenSentinel", "MaybeSentinel", + "CSTLogicError", "MetadataException", "ParserSyntaxError", "PartialParserConfig", diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index 5359ca3c..4d3dd386 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -4,16 +4,11 @@ # LICENSE file in the root directory of this source tree. from enum import auto, Enum -from typing import Any, Callable, final, Iterable, Optional, Sequence, Tuple, Union +from typing import Any, Callable, final, Optional, Sequence, Tuple -from libcst._parser.parso.pgen2.generator import ReservedString -from libcst._parser.parso.python.token import PythonTokenTypes, TokenType -from libcst._parser.types.token import Token from libcst._tabs import expand_tabs -_EOF_STR: str = "end of file (EOF)" -_INDENT_STR: str = "an indent" -_DEDENT_STR: str = "a dedent" + _NEWLINE_CHARS: str = "\r\n" @@ -21,42 +16,10 @@ class EOFSentinel(Enum): EOF = auto() -def get_expected_str( - encountered: Union[Token, EOFSentinel], - expected: Union[Iterable[Union[TokenType, ReservedString]], EOFSentinel], -) -> str: - if ( - isinstance(encountered, EOFSentinel) - or encountered.type is PythonTokenTypes.ENDMARKER - ): - encountered_str = _EOF_STR - elif encountered.type is PythonTokenTypes.INDENT: - encountered_str = _INDENT_STR - elif encountered.type is PythonTokenTypes.DEDENT: - encountered_str = _DEDENT_STR - else: - encountered_str = repr(encountered.string) +class CSTLogicError(Exception): + """General purpose internal error within LibCST itself.""" - if isinstance(expected, EOFSentinel): - expected_names = [_EOF_STR] - else: - expected_names = sorted( - [ - repr(el.name) if isinstance(el, TokenType) else repr(el.value) - for el in expected - ] - ) - - if len(expected_names) > 10: - # There's too many possibilities, so it's probably not useful to list them. - # Instead, let's just abbreviate the message. - return f"Unexpectedly encountered {encountered_str}." - else: - if len(expected_names) == 1: - expected_str = expected_names[0] - else: - expected_str = f"{', '.join(expected_names[:-1])}, or {expected_names[-1]}" - return f"Encountered {encountered_str}, but expected {expected_str}." + pass # pyre-fixme[2]: 'Any' type isn't pyre-strict. 
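The `libcst/__init__.py` and `_exceptions.py` hunks above re-export the new `CSTLogicError` ("general purpose internal error within LibCST itself") alongside the existing `ParserSyntaxError`. A minimal sketch of how downstream code might distinguish the two after this change — the malformed source string and the handling choices are purely illustrative, not part of the patch:

```python
import libcst

# Deliberately invalid source, used only to trigger a parse error.
source = "def broken(:\n    pass\n"

try:
    libcst.parse_module(source)
except libcst.ParserSyntaxError as exc:
    # Expected for malformed user input; surface it to the user.
    print(f"could not parse module: {exc}")
except libcst.CSTLogicError:
    # Internal LibCST invariant violation (previously a bare Exception);
    # re-raise rather than swallow so it can be reported upstream.
    raise
```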
diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index d043cb64..666fe311 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -8,6 +8,7 @@ from copy import deepcopy from dataclasses import dataclass, field, fields, replace from typing import Any, cast, ClassVar, Dict, List, Mapping, Sequence, TypeVar, Union +from libcst import CSTLogicError from libcst._flatten_sentinel import FlattenSentinel from libcst._nodes.internal import CodegenState from libcst._removal_sentinel import RemovalSentinel @@ -237,7 +238,7 @@ class CSTNode(ABC): # validate return type of the user-defined `visitor.on_leave` method if not isinstance(leave_result, (CSTNode, RemovalSentinel, FlattenSentinel)): - raise Exception( + raise CSTValidationError( "Expected a node of type CSTNode or a RemovalSentinel, " + f"but got a return value of {type(leave_result).__name__}" ) @@ -382,7 +383,7 @@ class CSTNode(ABC): new_tree = self.visit(_ChildReplacementTransformer(old_node, new_node)) if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): # The above transform never returns *Sentinel, so this isn't possible - raise Exception("Logic error, cannot get a *Sentinel here!") + raise CSTLogicError("Logic error, cannot get a *Sentinel here!") return new_tree def deep_remove( @@ -399,7 +400,7 @@ class CSTNode(ABC): if isinstance(new_tree, FlattenSentinel): # The above transform never returns FlattenSentinel, so this isn't possible - raise Exception("Logic error, cannot get a FlattenSentinel here!") + raise CSTLogicError("Logic error, cannot get a FlattenSentinel here!") return new_tree @@ -421,7 +422,7 @@ class CSTNode(ABC): new_tree = self.visit(_ChildWithChangesTransformer(old_node, changes)) if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): # This is impossible with the above transform. - raise Exception("Logic error, cannot get a *Sentinel here!") + raise CSTLogicError("Logic error, cannot get a *Sentinel here!") return new_tree def __eq__(self: _CSTNodeSelfT, other: object) -> bool: diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index a8f2ac3e..00a1b4a7 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -17,6 +17,8 @@ from tokenize import ( ) from typing import Callable, Generator, Literal, Optional, Sequence, Union +from libcst import CSTLogicError + from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.base import CSTCodegenError, CSTNode, CSTValidationError @@ -666,7 +668,7 @@ class SimpleString(_BasePrefixedString): if len(quote) not in {1, 3}: # We shouldn't get here due to construction validation logic, # but handle the case anyway. - raise Exception(f"Invalid string {self.value}") + raise CSTLogicError(f"Invalid string {self.value}") # pyre-ignore We know via the above validation that we will only # ever return one of the four string literals. 
@@ -1010,7 +1012,7 @@ class ConcatenatedString(BaseString): elif isinstance(right, FormattedString): rightbytes = "b" in right.prefix else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") if leftbytes != rightbytes: raise CSTValidationError("Cannot concatenate string and bytes.") @@ -1688,7 +1690,7 @@ class Annotation(CSTNode): if default_indicator == "->": state.add_token(" ") else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") # Now, output the indicator and the rest of the annotation state.add_token(default_indicator) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index e6d6915f..1aba38d3 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -9,6 +9,8 @@ from abc import ABC, abstractmethod from dataclasses import dataclass, field from typing import Literal, Optional, Pattern, Sequence, Union +from libcst import CSTLogicError + from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.base import CSTNode, CSTValidationError @@ -1165,12 +1167,10 @@ class ImportAlias(CSTNode): ) try: self.evaluated_name - except Exception as e: - if str(e) == "Logic error!": - raise CSTValidationError( - "The imported name must be a valid qualified name." - ) - raise e + except CSTLogicError as e: + raise CSTValidationError( + "The imported name must be a valid qualified name." + ) from e def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ImportAlias": return ImportAlias( @@ -1199,7 +1199,7 @@ class ImportAlias(CSTNode): elif isinstance(node, Attribute): return f"{self._name(node.value)}.{node.attr.value}" else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") @property def evaluated_name(self) -> str: diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 087dde19..65a0ff07 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -1052,7 +1052,9 @@ def _parse_statement_force_38(code: str) -> cst.BaseCompoundStatement: code, config=cst.PartialParserConfig(python_version="3.8") ) if not isinstance(statement, cst.BaseCompoundStatement): - raise Exception("This function is expecting to parse compound statements only!") + raise ValueError( + "This function is expecting to parse compound statements only!" + ) return statement diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index bddd4f3d..6ebcf978 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -22,7 +22,9 @@ def _parse_statement_force_38(code: str) -> cst.BaseCompoundStatement: code, config=cst.PartialParserConfig(python_version="3.8") ) if not isinstance(statement, cst.BaseCompoundStatement): - raise Exception("This function is expecting to parse compound statements only!") + raise ValueError( + "This function is expecting to parse compound statements only!" 
+ ) return statement diff --git a/libcst/_nodes/tests/test_removal_behavior.py b/libcst/_nodes/tests/test_removal_behavior.py index 9b1bf619..709b26f5 100644 --- a/libcst/_nodes/tests/test_removal_behavior.py +++ b/libcst/_nodes/tests/test_removal_behavior.py @@ -95,7 +95,7 @@ class RemovalBehavior(CSTNodeTest): self, before: str, after: str, visitor: Type[CSTTransformer] ) -> None: if before.endswith("\n") or after.endswith("\n"): - raise Exception("Test cases should not be newline-terminated!") + raise ValueError("Test cases should not be newline-terminated!") # Test doesn't have newline termination case before_module = parse_module(before) diff --git a/libcst/_parser/_parsing_check.py b/libcst/_parser/_parsing_check.py new file mode 100644 index 00000000..03283c95 --- /dev/null +++ b/libcst/_parser/_parsing_check.py @@ -0,0 +1,53 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Iterable, Union + +from libcst._exceptions import EOFSentinel +from libcst._parser.parso.pgen2.generator import ReservedString +from libcst._parser.parso.python.token import PythonTokenTypes, TokenType +from libcst._parser.types.token import Token + +_EOF_STR: str = "end of file (EOF)" +_INDENT_STR: str = "an indent" +_DEDENT_STR: str = "a dedent" + + +def get_expected_str( + encountered: Union[Token, EOFSentinel], + expected: Union[Iterable[Union[TokenType, ReservedString]], EOFSentinel], +) -> str: + if ( + isinstance(encountered, EOFSentinel) + or encountered.type is PythonTokenTypes.ENDMARKER + ): + encountered_str = _EOF_STR + elif encountered.type is PythonTokenTypes.INDENT: + encountered_str = _INDENT_STR + elif encountered.type is PythonTokenTypes.DEDENT: + encountered_str = _DEDENT_STR + else: + encountered_str = repr(encountered.string) + + if isinstance(expected, EOFSentinel): + expected_names = [_EOF_STR] + else: + expected_names = sorted( + [ + repr(el.name) if isinstance(el, TokenType) else repr(el.value) + for el in expected + ] + ) + + if len(expected_names) > 10: + # There's too many possibilities, so it's probably not useful to list them. + # Instead, let's just abbreviate the message. + return f"Unexpectedly encountered {encountered_str}." + else: + if len(expected_names) == 1: + expected_str = expected_names[0] + else: + expected_str = f"{', '.join(expected_names[:-1])}, or {expected_names[-1]}" + return f"Encountered {encountered_str}, but expected {expected_str}." diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index 6ab97ab8..d349bb14 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -26,12 +26,8 @@ from dataclasses import dataclass, field from typing import Generic, Iterable, List, Sequence, TypeVar, Union -from libcst._exceptions import ( - EOFSentinel, - get_expected_str, - ParserSyntaxError, - PartialParserSyntaxError, -) +from libcst._exceptions import EOFSentinel, ParserSyntaxError, PartialParserSyntaxError +from libcst._parser._parsing_check import get_expected_str from libcst._parser.parso.pgen2.generator import DFAState, Grammar, ReservedString from libcst._parser.parso.python.token import TokenType from libcst._parser.types.token import Token @@ -103,7 +99,7 @@ class BaseParser(Generic[_TokenT, _TokenTypeT, _NodeT]): def parse(self) -> _NodeT: # Ensure that we don't re-use parsers. 
if self.__was_parse_called: - raise Exception("Each parser object may only be used to parse once.") + raise ValueError("Each parser object may only be used to parse once.") self.__was_parse_called = True for token in self.tokens: diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 1a46de2a..79d7ad78 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -12,7 +12,8 @@ from tokenize import ( Intnumber as INTNUMBER_RE, ) -from libcst._exceptions import PartialParserSyntaxError +from libcst import CSTLogicError +from libcst._exceptions import ParserSyntaxError, PartialParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.expression import ( Arg, @@ -327,7 +328,12 @@ def convert_boolop( # Convert all of the operations that have no precedence in a loop for op, rightexpr in grouper(rightexprs, 2): if op.string not in BOOLOP_TOKEN_LUT: - raise Exception(f"Unexpected token '{op.string}'!") + raise ParserSyntaxError( + f"Unexpected token '{op.string}'!", + lines=config.lines, + raw_line=0, + raw_column=0, + ) leftexpr = BooleanOperation( left=leftexpr, # pyre-ignore Pyre thinks that the type of the LUT is CSTNode. @@ -420,7 +426,12 @@ def convert_comp_op( ) else: # this should be unreachable - raise Exception(f"Unexpected token '{op.string}'!") + raise ParserSyntaxError( + f"Unexpected token '{op.string}'!", + lines=config.lines, + raw_line=0, + raw_column=0, + ) else: # A two-token comparison leftcomp, rightcomp = children @@ -451,7 +462,12 @@ def convert_comp_op( ) else: # this should be unreachable - raise Exception(f"Unexpected token '{leftcomp.string} {rightcomp.string}'!") + raise ParserSyntaxError( + f"Unexpected token '{leftcomp.string} {rightcomp.string}'!", + lines=config.lines, + raw_line=0, + raw_column=0, + ) @with_production("star_expr", "'*' expr") @@ -493,7 +509,12 @@ def convert_binop( # Convert all of the operations that have no precedence in a loop for op, rightexpr in grouper(rightexprs, 2): if op.string not in BINOP_TOKEN_LUT: - raise Exception(f"Unexpected token '{op.string}'!") + raise ParserSyntaxError( + f"Unexpected token '{op.string}'!", + lines=config.lines, + raw_line=0, + raw_column=0, + ) leftexpr = BinaryOperation( left=leftexpr, # pyre-ignore Pyre thinks that the type of the LUT is CSTNode. 
@@ -540,7 +561,12 @@ def convert_factor( ) ) else: - raise Exception(f"Unexpected token '{op.string}'!") + raise ParserSyntaxError( + f"Unexpected token '{op.string}'!", + lines=config.lines, + raw_line=0, + raw_column=0, + ) return WithLeadingWhitespace( UnaryOperation(operator=opnode, expression=factor.value), op.whitespace_before @@ -651,7 +677,7 @@ def convert_atom_expr_trailer( ) else: # This is an invalid trailer, so lets give up - raise Exception("Logic error!") + raise CSTLogicError() return WithLeadingWhitespace(atom, whitespace_before) @@ -870,9 +896,19 @@ def convert_atom_basic( Imaginary(child.string), child.whitespace_before ) else: - raise Exception(f"Unparseable number {child.string}") + raise ParserSyntaxError( + f"Unparseable number {child.string}", + lines=config.lines, + raw_line=0, + raw_column=0, + ) else: - raise Exception(f"Logic error, unexpected token {child.type.name}") + raise ParserSyntaxError( + f"Logic error, unexpected token {child.type.name}", + lines=config.lines, + raw_line=0, + raw_column=0, + ) @with_production("atom_squarebrackets", "'[' [testlist_comp_list] ']'") @@ -1447,7 +1483,7 @@ def convert_arg_assign_comp_for( if equal.string == ":=": val = convert_namedexpr_test(config, children) if not isinstance(val, WithLeadingWhitespace): - raise Exception( + raise TypeError( f"convert_namedexpr_test returned {val!r}, not WithLeadingWhitespace" ) return Arg(value=val.value) diff --git a/libcst/_parser/conversions/params.py b/libcst/_parser/conversions/params.py index 9ac7f1d1..5b29f95d 100644 --- a/libcst/_parser/conversions/params.py +++ b/libcst/_parser/conversions/params.py @@ -6,6 +6,7 @@ from typing import Any, List, Optional, Sequence, Union +from libcst import CSTLogicError from libcst._exceptions import PartialParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.expression import ( @@ -121,7 +122,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(*abc, *): ... # This should be unreachable, the grammar already disallows it. - raise Exception( + raise ValueError( "Cannot have multiple star ('*') markers in a single argument " + "list." ) @@ -136,7 +137,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(foo, /, *, /, bar): ... # This should be unreachable, the grammar already disallows it. - raise Exception( + raise ValueError( "Cannot have multiple slash ('/') markers in a single argument " + "list." ) @@ -168,7 +169,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(**kwargs, trailing=None) # This should be unreachable, the grammar already disallows it. - raise Exception("Cannot have any arguments after a kwargs expansion.") + raise ValueError("Cannot have any arguments after a kwargs expansion.") elif ( isinstance(param.star, str) and param.star == "*" and param.default is None ): @@ -181,7 +182,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(*first, *second): ... # This should be unreachable, the grammar already disallows it. - raise Exception( + raise ValueError( "Expected a keyword argument but found a starred positional " + "argument expansion." ) @@ -197,13 +198,13 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(**first, **second) # This should be unreachable, the grammar already disallows it. - raise Exception( + raise ValueError( "Multiple starred keyword argument expansions are not allowed in a " + "single argument list" ) else: # The state machine should never end up here. 
- raise Exception("Logic error!") + raise CSTLogicError("Logic error!") return current_param diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index 608f002f..f96c6ea2 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -6,7 +6,8 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Type -from libcst._exceptions import PartialParserSyntaxError +from libcst import CSTLogicError +from libcst._exceptions import ParserSyntaxError, PartialParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.expression import ( Annotation, @@ -283,7 +284,9 @@ def convert_annassign(config: ParserConfig, children: Sequence[Any]) -> Any: whitespace_after=parse_simple_whitespace(config, equal.whitespace_after), ) else: - raise Exception("Invalid parser state!") + raise ParserSyntaxError( + "Invalid parser state!", lines=config.lines, raw_line=0, raw_column=0 + ) return AnnAssignPartial( annotation=Annotation( @@ -319,7 +322,13 @@ def convert_annassign(config: ParserConfig, children: Sequence[Any]) -> Any: def convert_augassign(config: ParserConfig, children: Sequence[Any]) -> Any: op, expr = children if op.string not in AUGOP_TOKEN_LUT: - raise Exception(f"Unexpected token '{op.string}'!") + raise ParserSyntaxError( + f"Unexpected token '{op.string}'!", + lines=config.lines, + raw_line=0, + raw_column=0, + ) + return AugAssignPartial( # pyre-ignore Pyre seems to think that the value of this LUT is CSTNode operator=AUGOP_TOKEN_LUT[op.string]( @@ -447,7 +456,7 @@ def convert_import_relative(config: ParserConfig, children: Sequence[Any]) -> An # This should be the dotted name, and we can't get more than # one, but lets be sure anyway if dotted_name is not None: - raise Exception("Logic error!") + raise CSTLogicError() dotted_name = child return ImportRelativePartial(relative=tuple(dots), module=dotted_name) @@ -644,7 +653,7 @@ def convert_raise_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: item=source.value, ) else: - raise Exception("Logic error!") + raise CSTLogicError() return WithLeadingWhitespace( Raise(whitespace_after_raise=whitespace_after_raise, exc=exc, cause=cause), @@ -893,7 +902,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: if isinstance(clause, Token): if clause.string == "else": if orelse is not None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") orelse = Else( leading_lines=parse_empty_lines(config, clause.whitespace_before), whitespace_before_colon=parse_simple_whitespace( @@ -903,7 +912,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: ) elif clause.string == "finally": if finalbody is not None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") finalbody = Finally( leading_lines=parse_empty_lines(config, clause.whitespace_before), whitespace_before_colon=parse_simple_whitespace( @@ -912,7 +921,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: body=suite, ) else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") elif isinstance(clause, ExceptClausePartial): handlers.append( ExceptHandler( @@ -927,7 +936,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: ) ) else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") return Try( leading_lines=parse_empty_lines(config, trytoken.whitespace_before), @@ -1333,7 +1342,7 @@ def 
convert_asyncable_stmt(config: ParserConfig, children: Sequence[Any]) -> Any asynchronous=asyncnode, leading_lines=leading_lines ) else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") @with_production("suite", "simple_stmt_suite | indented_suite") diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index 8e6ade59..ee65ef72 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -319,7 +319,7 @@ def validate_grammar() -> None: production_name = fn_productions[0].name expected_name = f"convert_{production_name}" if fn.__name__ != expected_name: - raise Exception( + raise ValueError( f"The conversion function for '{production_name}' " + f"must be called '{expected_name}', not '{fn.__name__}'." ) @@ -330,7 +330,7 @@ def _get_version_comparison(version: str) -> Tuple[str, PythonVersionInfo]: return (version[:2], parse_version_string(version[2:].strip())) if version[:1] in (">", "<"): return (version[:1], parse_version_string(version[1:].strip())) - raise Exception(f"Invalid version comparison specifier '{version}'") + raise ValueError(f"Invalid version comparison specifier '{version}'") def _compare_versions( @@ -350,7 +350,7 @@ def _compare_versions( return actual_version > requested_version if comparison == "<": return actual_version < requested_version - raise Exception(f"Invalid version comparison specifier '{comparison}'") + raise ValueError(f"Invalid version comparison specifier '{comparison}'") def _should_include( @@ -405,7 +405,7 @@ def get_nonterminal_conversions( if not _should_include_future(fn_production.future, future_imports): continue if fn_production.name in conversions: - raise Exception( + raise ValueError( f"Found duplicate '{fn_production.name}' production in grammar" ) conversions[fn_production.name] = fn diff --git a/libcst/_parser/parso/pgen2/generator.py b/libcst/_parser/parso/pgen2/generator.py index ae889f33..5e83741b 100644 --- a/libcst/_parser/parso/pgen2/generator.py +++ b/libcst/_parser/parso/pgen2/generator.py @@ -259,7 +259,7 @@ def generate_grammar(bnf_grammar: str, token_namespace: Any) -> Grammar[Any]: _calculate_tree_traversal(rule_to_dfas) if start_nonterminal is None: - raise Exception("could not find starting nonterminal!") + raise ValueError("could not find starting nonterminal!") return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index bfd159dd..711a8785 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -36,6 +36,7 @@ from collections import namedtuple from dataclasses import dataclass from typing import Dict, Generator, Iterable, Optional, Pattern, Set, Tuple +from libcst import CSTLogicError from libcst._parser.parso.python.token import PythonTokenTypes from libcst._parser.parso.utils import PythonVersionInfo, split_lines @@ -522,14 +523,14 @@ def _tokenize_lines_py36_or_below( # noqa: C901 if contstr: # continued string if endprog is None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") endmatch = endprog.match(line) if endmatch: pos = endmatch.end(0) if contstr_start is None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") if stashed is not None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") yield PythonToken(STRING, contstr + line[:pos], contstr_start, prefix) contstr = "" contline = None @@ -547,7 +548,7 @@ def _tokenize_lines_py36_or_below( # noqa: C901 ) if 
string: if stashed is not None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") yield PythonToken( FSTRING_STRING, string, @@ -572,7 +573,7 @@ def _tokenize_lines_py36_or_below( # noqa: C901 pos += quote_length if fstring_end_token is not None: if stashed is not None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") yield fstring_end_token continue @@ -885,12 +886,12 @@ def _tokenize_lines_py37_or_above( # noqa: C901 if contstr: # continued string if endprog is None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") endmatch = endprog.match(line) if endmatch: pos = endmatch.end(0) if contstr_start is None: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") yield PythonToken(STRING, contstr + line[:pos], contstr_start, prefix) contstr = "" contline = None diff --git a/libcst/_parser/production_decorator.py b/libcst/_parser/production_decorator.py index 41a817f8..d5ba52de 100644 --- a/libcst/_parser/production_decorator.py +++ b/libcst/_parser/production_decorator.py @@ -39,7 +39,7 @@ def with_production( # pyre-ignore: Pyre doesn't think that fn has a __name__ attribute fn_name = fn.__name__ if not fn_name.startswith("convert_"): - raise Exception( + raise ValueError( "A function with a production must be named 'convert_X', not " + f"'{fn_name}'." ) diff --git a/libcst/_parser/py_whitespace_parser.py b/libcst/_parser/py_whitespace_parser.py index b1fd9b5e..6b6573a6 100644 --- a/libcst/_parser/py_whitespace_parser.py +++ b/libcst/_parser/py_whitespace_parser.py @@ -5,6 +5,7 @@ from typing import List, Optional, Sequence, Tuple, Union +from libcst import CSTLogicError, ParserSyntaxError from libcst._nodes.whitespace import ( Comment, COMMENT_RE, @@ -103,10 +104,13 @@ def parse_trailing_whitespace( ) -> TrailingWhitespace: trailing_whitespace = _parse_trailing_whitespace(config, state) if trailing_whitespace is None: - raise Exception( + raise ParserSyntaxError( "Internal Error: Failed to parse TrailingWhitespace. This should never " + "happen because a TrailingWhitespace is never optional in the grammar, " - + "so this error should've been caught by parso first." + + "so this error should've been caught by parso first.", + lines=config.lines, + raw_line=state.line, + raw_column=state.column, ) return trailing_whitespace @@ -177,7 +181,9 @@ def _parse_indent( if state.column == len(line_str) and state.line == len(config.lines): # We're at EOF, treat this as a failed speculative parse return False - raise Exception("Internal Error: Column should be 0 when parsing an indent.") + raise CSTLogicError( + "Internal Error: Column should be 0 when parsing an indent." 
+ ) if line_str.startswith(absolute_indent, state.column): state.column += len(absolute_indent) return True @@ -206,7 +212,12 @@ def _parse_newline( newline_str = newline_match.group(0) state.column += len(newline_str) if state.column != len(line_str): - raise Exception("Internal Error: Found a newline, but it wasn't the EOL.") + raise ParserSyntaxError( + "Internal Error: Found a newline, but it wasn't the EOL.", + lines=config.lines, + raw_line=state.line, + raw_column=state.column, + ) if state.line < len(config.lines): # this newline was the end of a line, and there's another line, # therefore we should move to the next line diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index 7f3cb6f8..e6def68c 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -8,7 +8,7 @@ from dataclasses import dataclass, fields from typing import Generator, List, Optional, Sequence, Set, Tuple, Type, Union import libcst as cst -from libcst import ensure_type, parse_expression +from libcst import CSTLogicError, ensure_type, parse_expression from libcst.codegen.gather import all_libcst_nodes, typeclasses CST_DIR: Set[str] = set(dir(cst)) @@ -283,9 +283,9 @@ class AddWildcardsToSequenceUnions(cst.CSTTransformer): # type blocks, even for sequence types. return if len(node.slice) != 1: - raise Exception( + raise ValueError( "Unexpected number of sequence elements inside Sequence type " - + "annotation!" + "annotation!" ) nodeslice = node.slice[0].slice if isinstance(nodeslice, cst.Index): @@ -449,10 +449,14 @@ def _get_clean_type_from_subscript( if typecst.value.deep_equals(cst.Name("Sequence")): # Lets attempt to widen the sequence type and alias it. if len(typecst.slice) != 1: - raise Exception("Logic error, Sequence shouldn't have more than one param!") + raise CSTLogicError( + "Logic error, Sequence shouldn't have more than one param!" + ) inner_type = typecst.slice[0].slice if not isinstance(inner_type, cst.Index): - raise Exception("Logic error, expecting Index for only Sequence element!") + raise CSTLogicError( + "Logic error, expecting Index for only Sequence element!" + ) inner_type = inner_type.value if isinstance(inner_type, cst.Subscript): @@ -460,7 +464,7 @@ def _get_clean_type_from_subscript( elif isinstance(inner_type, (cst.Name, cst.SimpleString)): clean_inner_type = _get_clean_type_from_expression(aliases, inner_type) else: - raise Exception( + raise CSTLogicError( f"Logic error, unexpected type in Sequence: {type(inner_type)}!" ) @@ -505,7 +509,7 @@ def _get_clean_type_and_aliases( elif isinstance(typecst, (cst.Name, cst.SimpleString)): clean_type = _get_clean_type_from_expression(aliases, typecst) else: - raise Exception(f"Logic error, unexpected top level type: {type(typecst)}!") + raise CSTLogicError(f"Logic error, unexpected top level type: {type(typecst)}!") # Now, insert OneOf/AllOf and MatchIfTrue into unions so we can typecheck their usage. 
# This allows us to put OneOf[SomeType] or MatchIfTrue[cst.SomeType] into any diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index 0cdec103..d9c70d05 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -51,7 +51,7 @@ def invoke_formatter(formatter_args: Sequence[str], code: AnyStr) -> AnyStr: # Make sure there is something to run if len(formatter_args) == 0: - raise Exception("No formatter configured but code formatting requested.") + raise ValueError("No formatter configured but code formatting requested.") # Invoke the formatter, giving it the code as stdin and assuming the formatted # code comes from stdout. @@ -593,7 +593,7 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 ) if jobs < 1: - raise Exception("Must have at least one job to process!") + raise ValueError("Must have at least one job to process!") if total == 0: return ParallelTransformResult(successes=0, failures=0, skips=0, warnings=0) diff --git a/libcst/codemod/_codemod.py b/libcst/codemod/_codemod.py index c0c3b2c7..e267f154 100644 --- a/libcst/codemod/_codemod.py +++ b/libcst/codemod/_codemod.py @@ -56,9 +56,9 @@ class Codemod(MetadataDependent, ABC): """ module = self.context.module if module is None: - raise Exception( + raise ValueError( f"Attempted access of {self.__class__.__name__}.module outside of " - + "transform_module()." + "transform_module()." ) return module diff --git a/libcst/codemod/_visitor.py b/libcst/codemod/_visitor.py index ab915c49..89248838 100644 --- a/libcst/codemod/_visitor.py +++ b/libcst/codemod/_visitor.py @@ -6,7 +6,7 @@ from typing import Mapping import libcst as cst -from libcst import MetadataDependent +from libcst import MetadataDependent, MetadataException from libcst.codemod._codemod import Codemod from libcst.codemod._context import CodemodContext from libcst.matchers import MatcherDecoratableTransformer, MatcherDecoratableVisitor @@ -69,14 +69,14 @@ class ContextAwareVisitor(MatcherDecoratableVisitor, MetadataDependent): if dependencies: wrapper = self.context.wrapper if wrapper is None: - raise Exception( + raise MetadataException( f"Attempting to instantiate {self.__class__.__name__} outside of " + "an active transform. This means that metadata hasn't been " + "calculated and we cannot successfully create this visitor." ) for dep in dependencies: if dep not in wrapper._metadata: - raise Exception( + raise MetadataException( f"Attempting to access metadata {dep.__name__} that was not a " + "declared dependency of parent transform! This means it is " + "not possible to compute this value. Please ensure that all " @@ -101,7 +101,7 @@ class ContextAwareVisitor(MatcherDecoratableVisitor, MetadataDependent): """ module = self.context.module if module is None: - raise Exception( + raise ValueError( f"Attempted access of {self.__class__.__name__}.module outside of " + "transform_module()." 
) diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index ab98c0ea..43d19bce 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -9,6 +9,8 @@ from typing import Generator, List, Optional, Sequence, Set, Tuple import libcst as cst import libcst.matchers as m +from libcst import CSTLogicError +from libcst._exceptions import ParserSyntaxError from libcst.codemod import ( CodemodContext, ContextAwareTransformer, @@ -23,7 +25,7 @@ def _get_lhs(field: cst.BaseExpression) -> cst.BaseExpression: elif isinstance(field, (cst.Attribute, cst.Subscript)): return _get_lhs(field.value) else: - raise Exception("Unsupported node type!") + raise TypeError("Unsupported node type!") def _find_expr_from_field_name( @@ -48,7 +50,7 @@ def _find_expr_from_field_name( if isinstance(lhs, cst.Integer): index = int(lhs.value) if index < 0 or index >= len(args): - raise Exception(f"Logic error, arg sequence {index} out of bounds!") + raise CSTLogicError(f"Logic error, arg sequence {index} out of bounds!") elif isinstance(lhs, cst.Name): for i, arg in enumerate(args): kw = arg.keyword @@ -58,10 +60,12 @@ def _find_expr_from_field_name( index = i break if index is None: - raise Exception(f"Logic error, arg name {lhs.value} out of bounds!") + raise CSTLogicError(f"Logic error, arg name {lhs.value} out of bounds!") if index is None: - raise Exception(f"Logic error, unsupported fieldname expression {fieldname}!") + raise CSTLogicError( + f"Logic error, unsupported fieldname expression {fieldname}!" + ) # Format it! return field_expr.deep_replace(lhs, args[index].value) @@ -141,7 +145,7 @@ def _get_tokens( # noqa: C901 in_brackets -= 1 if in_brackets < 0: - raise Exception("Stray } in format string!") + raise ValueError("Stray } in format string!") if in_brackets == 0: field_name, format_spec, conversion = _get_field(format_accum) @@ -158,9 +162,11 @@ def _get_tokens( # noqa: C901 format_accum += char if in_brackets > 0: - raise Exception("Stray { in format string!") + raise ParserSyntaxError( + "Stray { in format string!", lines=[string], raw_line=0, raw_column=0 + ) if format_accum: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") # Yield the last bit of information yield (prefix, None, None, None) @@ -188,7 +194,7 @@ class SwitchStringQuotesTransformer(ContextAwareTransformer): def __init__(self, context: CodemodContext, avoid_quote: str) -> None: super().__init__(context) if avoid_quote not in {'"', "'"}: - raise Exception("Must specify either ' or \" single quote to avoid.") + raise ValueError("Must specify either ' or \" single quote to avoid.") self.avoid_quote: str = avoid_quote self.replace_quote: str = '"' if avoid_quote == "'" else "'" @@ -296,7 +302,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): ) in format_spec_tokens: if spec_format_spec is not None: # This shouldn't be possible, we don't allow it in the spec! 
- raise Exception("Logic error!") + raise CSTLogicError("Logic error!") if spec_literal_text: format_spec_parts.append( cst.FormattedStringText(spec_literal_text) diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index 501c9621..d74624e4 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -53,12 +53,12 @@ class EscapeStringQuote(cst.CSTTransformer): original_node.prefix + quo + original_node.raw_value + quo ) if escaped_string.evaluated_value != original_node.evaluated_value: - raise Exception( + raise ValueError( f"Failed to escape string:\n original:{original_node.value}\n escaped:{escaped_string.value}" ) else: return escaped_string - raise Exception( + raise ValueError( f"Cannot find a good quote for escaping the SimpleString: {original_node.value}" ) return original_node diff --git a/libcst/codemod/commands/fix_pyre_directives.py b/libcst/codemod/commands/fix_pyre_directives.py index c3ab41b7..a9779d0f 100644 --- a/libcst/codemod/commands/fix_pyre_directives.py +++ b/libcst/codemod/commands/fix_pyre_directives.py @@ -7,6 +7,7 @@ from typing import Dict, Sequence, Union import libcst import libcst.matchers as m +from libcst import CSTLogicError from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand from libcst.helpers import insert_header_comments @@ -29,12 +30,12 @@ class FixPyreDirectivesCommand(VisitorBasedCodemodCommand): def visit_Module_header(self, node: libcst.Module) -> None: if self.in_module_header: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") self.in_module_header = True def leave_Module_header(self, node: libcst.Module) -> None: if not self.in_module_header: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") self.in_module_header = False def leave_EmptyLine( diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index f1fb4693..f3accdcd 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -153,7 +153,7 @@ class RenameCommand(VisitorBasedCodemodCommand): import_alias_name = import_alias.name import_alias_full_name = get_full_name_for_node(import_alias_name) if import_alias_full_name is None: - raise Exception("Could not parse full name for ImportAlias.name node.") + raise ValueError("Could not parse full name for ImportAlias.name node.") if self.old_name.startswith(import_alias_full_name + "."): replacement_module = self.gen_replacement_module(import_alias_full_name) @@ -286,7 +286,7 @@ class RenameCommand(VisitorBasedCodemodCommand): ) -> Union[cst.Name, cst.Attribute]: full_name_for_node = get_full_name_for_node(original_node) if full_name_for_node is None: - raise Exception("Could not parse full name for Attribute node.") + raise ValueError("Could not parse full name for Attribute node.") full_replacement_name = self.gen_replacement(full_name_for_node) # If a node has no associated QualifiedName, we are still inside an import statement. @@ -368,7 +368,7 @@ class RenameCommand(VisitorBasedCodemodCommand): ) -> Union[cst.Attribute, cst.Name]: name_or_attr_node: cst.BaseExpression = cst.parse_expression(dotted_expression) if not isinstance(name_or_attr_node, (cst.Name, cst.Attribute)): - raise Exception( + raise ValueError( "`parse_expression()` on dotted path returned non-Attribute-or-Name." 
) return name_or_attr_node diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index f734af5c..eeab43ae 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -7,7 +7,7 @@ from collections import defaultdict from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst -from libcst import matchers as m, parse_statement +from libcst import CSTLogicError, matchers as m, parse_statement from libcst._nodes.statement import Import, ImportFrom, SimpleStatementLine from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer @@ -107,7 +107,7 @@ class AddImportsVisitor(ContextAwareTransformer): ) -> List[ImportItem]: imports = context.scratch.get(AddImportsVisitor.CONTEXT_KEY, []) if not isinstance(imports, list): - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") return imports @staticmethod @@ -136,7 +136,7 @@ class AddImportsVisitor(ContextAwareTransformer): """ if module == "__future__" and obj is None: - raise Exception("Cannot import __future__ directly!") + raise ValueError("Cannot import __future__ directly!") imports = AddImportsVisitor._get_imports_from_context(context) imports.append(ImportItem(module, obj, asname, relative)) context.scratch[AddImportsVisitor.CONTEXT_KEY] = imports @@ -157,9 +157,9 @@ class AddImportsVisitor(ContextAwareTransformer): # Verify that the imports are valid for imp in imps: if imp.module == "__future__" and imp.obj_name is None: - raise Exception("Cannot import __future__ directly!") + raise ValueError("Cannot import __future__ directly!") if imp.module == "__future__" and imp.alias is not None: - raise Exception("Cannot import __future__ objects with aliases!") + raise ValueError("Cannot import __future__ objects with aliases!") # Resolve relative imports if we have a module name imps = [imp.resolve_relative(self.context.full_package_name) for imp in imps] diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index 55940127..b625ee60 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -6,6 +6,7 @@ from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union import libcst as cst +from libcst import CSTLogicError from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor @@ -45,7 +46,7 @@ class RemovedNodeVisitor(ContextAwareVisitor): self.context.full_package_name, import_node ) if module_name is None: - raise Exception("Cannot look up absolute module from relative import!") + raise ValueError("Cannot look up absolute module from relative import!") # We know any local names will refer to this as an alias if # there is one, and as the original name if there is not one @@ -72,7 +73,9 @@ class RemovedNodeVisitor(ContextAwareVisitor): # Look up the scope for this node, remove the import that caused it to exist. metadata_wrapper = self.context.wrapper if metadata_wrapper is None: - raise Exception("Cannot look up import, metadata is not computed for node!") + raise ValueError( + "Cannot look up import, metadata is not computed for node!" 
+ ) scope_provider = metadata_wrapper.resolve(ScopeProvider) try: scope = scope_provider[node] @@ -185,7 +188,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): ) -> List[Tuple[str, Optional[str], Optional[str]]]: unused_imports = context.scratch.get(RemoveImportsVisitor.CONTEXT_KEY, []) if not isinstance(unused_imports, list): - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") return unused_imports @staticmethod @@ -255,7 +258,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): context.full_package_name, node ) if module_name is None: - raise Exception("Cannot look up absolute module from relative import!") + raise ValueError("Cannot look up absolute module from relative import!") for import_alias in names: RemoveImportsVisitor.remove_unused_import( context, diff --git a/libcst/display/text.py b/libcst/display/text.py index 3c6dc288..0e270009 100644 --- a/libcst/display/text.py +++ b/libcst/display/text.py @@ -8,7 +8,7 @@ from __future__ import annotations import dataclasses from typing import List, Sequence -from libcst import CSTNode +from libcst import CSTLogicError, CSTNode from libcst.helpers import filter_node_fields _DEFAULT_INDENT: str = " " @@ -84,7 +84,7 @@ def _node_repr_recursive( # noqa: C901 else: child_tokens.append("[]") else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") # Handle indentation and trailing comma. split_by_line = "".join(child_tokens).split("\n") diff --git a/libcst/helpers/_template.py b/libcst/helpers/_template.py index e3f915a5..e205e0af 100644 --- a/libcst/helpers/_template.py +++ b/libcst/helpers/_template.py @@ -45,12 +45,12 @@ def unmangled_name(var: str) -> Optional[str]: def mangle_template(template: str, template_vars: Set[str]) -> str: if TEMPLATE_PREFIX in template or TEMPLATE_SUFFIX in template: - raise Exception("Cannot parse a template containing reserved strings") + raise ValueError("Cannot parse a template containing reserved strings") for var in template_vars: original = f"{{{var}}}" if original not in template: - raise Exception( + raise ValueError( f'Template string is missing a reference to "{var}" referred to in kwargs' ) template = template.replace(original, mangled_name(var)) @@ -142,7 +142,7 @@ class TemplateTransformer(cst.CSTTransformer): name for name in template_replacements if name not in supported_vars } if unsupported_vars: - raise Exception( + raise ValueError( f'Template replacement for "{next(iter(unsupported_vars))}" is unsupported' ) @@ -350,7 +350,7 @@ class TemplateChecker(cst.CSTVisitor): def visit_Name(self, node: cst.Name) -> None: for var in self.template_vars: if node.value == mangled_name(var): - raise Exception(f'Template variable "{var}" was not replaced properly') + raise ValueError(f'Template variable "{var}" was not replaced properly') def unmangle_nodes( @@ -424,8 +424,8 @@ def parse_template_statement( if not isinstance( new_statement, (cst.SimpleStatementLine, cst.BaseCompoundStatement) ): - raise Exception( - f"Expected a statement but got a {new_statement.__class__.__name__}!" + raise TypeError( + f"Expected a statement but got a {new_statement.__class__.__qualname__}!" 
) new_statement.visit(TemplateChecker({name for name in template_replacements})) return new_statement diff --git a/libcst/helpers/common.py b/libcst/helpers/common.py index 16c77669..dee73aa4 100644 --- a/libcst/helpers/common.py +++ b/libcst/helpers/common.py @@ -19,7 +19,7 @@ def ensure_type(node: object, nodetype: Type[T]) -> T: """ if not isinstance(node, nodetype): - raise Exception( - f"Expected a {nodetype.__name__} but got a {node.__class__.__name__}!" + raise ValueError( + f"Expected a {nodetype.__name__} but got a {node.__class__.__qualname__}!" ) return node diff --git a/libcst/helpers/expression.py b/libcst/helpers/expression.py index beb5f324..5ae016cf 100644 --- a/libcst/helpers/expression.py +++ b/libcst/helpers/expression.py @@ -38,5 +38,5 @@ def get_full_name_for_node_or_raise(node: Union[str, cst.CSTNode]) -> str: """ full_name = get_full_name_for_node(node) if full_name is None: - raise Exception(f"Not able to parse full name for: {node}") + raise ValueError(f"Not able to parse full name for: {node}") return full_name diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 37e6af08..2b2973bf 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -80,7 +80,7 @@ def get_absolute_module_for_import_or_raise( ) -> str: module = get_absolute_module_for_import(current_module, import_node) if module is None: - raise Exception(f"Unable to compute absolute module for {import_node}") + raise ValueError(f"Unable to compute absolute module for {import_node}") return module @@ -121,7 +121,7 @@ def get_absolute_module_from_package_for_import_or_raise( ) -> str: module = get_absolute_module_from_package_for_import(current_package, import_node) if module is None: - raise Exception(f"Unable to compute absolute module for {import_node}") + raise ValueError(f"Unable to compute absolute module for {import_node}") return module diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 2da2ff4c..1727f0df 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -29,7 +29,7 @@ from typing import ( import libcst import libcst.metadata as meta -from libcst import FlattenSentinel, MaybeSentinel, RemovalSentinel +from libcst import CSTLogicError, FlattenSentinel, MaybeSentinel, RemovalSentinel from libcst._metadata_dependent import LazyValue @@ -143,7 +143,7 @@ class TypeOf(Generic[_MatcherTypeT], BaseMatcherNode): for option in options: if isinstance(option, TypeOf): if option.initalized: - raise Exception( + raise ValueError( "Cannot chain an uninitalized TypeOf with an initalized one" ) actual_options.extend(option._raw_options) @@ -213,7 +213,7 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): actual_options: List[_MatcherT] = [] for option in options: if isinstance(option, AllOf): - raise Exception("Cannot use AllOf and OneOf in combination!") + raise ValueError("Cannot use AllOf and OneOf in combination!") elif isinstance(option, (OneOf, TypeOf)): actual_options.extend(option.options) else: @@ -234,7 +234,7 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): return OneOf(self, other) def __and__(self, other: _OtherNodeT) -> NoReturn: - raise Exception("Cannot use AllOf and OneOf in combination!") + raise ValueError("Cannot use AllOf and OneOf in combination!") def __invert__(self) -> "AllOf[_MatcherT]": # Invert using De Morgan's Law so we don't have to complicate types. 
@@ -286,9 +286,9 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): actual_options: List[_MatcherT] = [] for option in options: if isinstance(option, OneOf): - raise Exception("Cannot use AllOf and OneOf in combination!") + raise ValueError("Cannot use AllOf and OneOf in combination!") elif isinstance(option, TypeOf): - raise Exception("Cannot use AllOf and TypeOf in combination!") + raise ValueError("Cannot use AllOf and TypeOf in combination!") elif isinstance(option, AllOf): actual_options.extend(option.options) else: @@ -306,7 +306,7 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> NoReturn: - raise Exception("Cannot use AllOf and OneOf in combination!") + raise ValueError("Cannot use AllOf and OneOf in combination!") def __and__(self, other: _OtherNodeT) -> "AllOf[Union[_MatcherT, _OtherNodeT]]": return AllOf(self, other) @@ -431,7 +431,7 @@ class _ExtractMatchingNode(Generic[_MatcherT]): # that are captured with an and, either all of them will be assigned the # same node, or none of them. It makes more sense to move the SaveMatchedNode # up to wrap the AllOf. - raise Exception( + raise ValueError( ( "Cannot use AllOf with SavedMatchedNode children! Instead, you should " + "use SaveMatchedNode(AllOf(options...))." @@ -447,10 +447,10 @@ class _ExtractMatchingNode(Generic[_MatcherT]): def __invert__(self) -> "_MatcherT": # This doesn't make sense. We don't want to capture a node only if it # doesn't match, since this will never capture anything. - raise Exception( + raise ValueError( ( "Cannot invert a SaveMatchedNode. Instead you should wrap SaveMatchedNode " - + "around your inversion itself" + "around your inversion itself" ) ) @@ -761,7 +761,9 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): n: int, ) -> None: if n < 0: - raise Exception(f"{self.__class__.__name__} n attribute must be positive") + raise ValueError( + f"{self.__class__.__qualname__} n attribute must be positive" + ) self._n: int = n self._matcher: Union[_MatcherT, DoNotCareSentinel] = matcher @@ -784,13 +786,13 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: - raise Exception("AtLeastN cannot be used in a OneOf matcher") + raise ValueError("AtLeastN cannot be used in a OneOf matcher") def __and__(self, other: object) -> NoReturn: - raise Exception("AtLeastN cannot be used in an AllOf matcher") + raise ValueError("AtLeastN cannot be used in an AllOf matcher") def __invert__(self) -> NoReturn: - raise Exception("Cannot invert an AtLeastN matcher!") + raise ValueError("Cannot invert an AtLeastN matcher!") def __repr__(self) -> str: if self._n == 0: @@ -863,7 +865,9 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): n: int, ) -> None: if n < 0: - raise Exception(f"{self.__class__.__name__} n attribute must be positive") + raise ValueError( + f"{self.__class__.__qualname__} n attribute must be positive" + ) self._n: int = n self._matcher: Union[_MatcherT, DoNotCareSentinel] = matcher @@ -887,13 +891,13 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. 
def __or__(self, other: object) -> NoReturn: - raise Exception("AtMostN cannot be used in a OneOf matcher") + raise ValueError("AtMostN cannot be used in a OneOf matcher") def __and__(self, other: object) -> NoReturn: - raise Exception("AtMostN cannot be used in an AllOf matcher") + raise ValueError("AtMostN cannot be used in an AllOf matcher") def __invert__(self) -> NoReturn: - raise Exception("Cannot invert an AtMostN matcher!") + raise ValueError("Cannot invert an AtMostN matcher!") def __repr__(self) -> str: if self._n == 1: @@ -1158,7 +1162,7 @@ def _sequence_matches( # noqa: C901 else: # There are no other types of wildcard consumers, but we're making # pyre happy with that fact. - raise Exception(f"Logic error unrecognized wildcard {type(matcher)}!") + raise CSTLogicError(f"Logic error unrecognized wildcard {type(matcher)}!") elif isinstance(matcher, _ExtractMatchingNode): # See if the raw matcher matches. If it does, capture the sequence we matched and store it. result = _sequence_matches( @@ -1354,7 +1358,7 @@ def _metadata_matches( # noqa: C901 return None return {} if actual_value == metadata.value else None else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") def _node_matches( # noqa: C901 @@ -1918,7 +1922,7 @@ def replace( elif isinstance(tree, meta.MetadataWrapper): return tree.module.deep_clone() else: - raise Exception("Logic error!") + raise CSTLogicError("Logic error!") if isinstance(tree, meta.MetadataWrapper) and metadata_resolver is None: # Provide a convenience for calling replace directly on a MetadataWrapper. @@ -1935,5 +1939,5 @@ def replace( new_tree = tree.visit(replacer) if isinstance(new_tree, FlattenSentinel): # The above transform never returns FlattenSentinel, so this isn't possible - raise Exception("Logic error, cannot get a FlattenSentinel here!") + raise CSTLogicError("Logic error, cannot get a FlattenSentinel here!") return new_tree diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index 811fed62..5d93fbe8 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -77,7 +77,7 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): self._computed: MutableMapping["CSTNode", MaybeLazyMetadataT] = {} if self.gen_cache and cache is None: # The metadata provider implementation is responsible to store and use cache. - raise Exception( + raise ValueError( f"Cache is required for initializing {self.__class__.__name__}." ) self.cache = cache diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index 770ba1f6..ab6430d8 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -85,7 +85,7 @@ class FullRepoManager: MetadataWrapper(module, cache=manager.get_cache_for_path("a.py")) """ if path not in self._paths: - raise Exception( + raise ValueError( "The path needs to be in paths parameter when constructing FullRepoManager for efficient batch processing." ) # Make sure that the cache is available to us. 
If the user called diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index 50ca3458..a0a70a8c 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -63,17 +63,11 @@ class TypeInferenceProviderTest(UnitTest): @classmethod def setUpClass(cls) -> None: os.chdir(TEST_SUITE_PATH) - try: - subprocess.run(["pyre", "-n", "start", "--no-watchman"]) - except subprocess.TimeoutExpired as exc: - raise exc + subprocess.run(["pyre", "-n", "start", "--no-watchman"]) @classmethod def tearDownClass(cls) -> None: - try: - subprocess.run(["pyre", "-n", "stop"], cwd=TEST_SUITE_PATH) - except subprocess.TimeoutExpired as exc: - raise exc + subprocess.run(["pyre", "-n", "stop"], cwd=TEST_SUITE_PATH) @data_provider( ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index f00c97b6..8a90c26b 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -14,6 +14,11 @@ from libcst.metadata.base_provider import BatchableMetadataProvider from libcst.metadata.position_provider import PositionProvider +class TypeInferenceError(Exception): + """An attempt to access inferred type annotation + (through Pyre Query API) failed.""" + + class Position(TypedDict): line: int column: int @@ -60,17 +65,19 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): ) -> Mapping[str, object]: params = ",".join(f"path='{root_path / path}'" for path in paths) cmd_args = ["pyre", "--noninteractive", "query", f"types({params})"] - try: - stdout, stderr, return_code = run_command(cmd_args, timeout=timeout) - except subprocess.TimeoutExpired as exc: - raise exc - if return_code != 0: - raise Exception(f"stderr:\n {stderr}\nstdout:\n {stdout}") + result = subprocess.run( + cmd_args, capture_output=True, timeout=timeout, text=True + ) + try: - resp = json.loads(stdout)["response"] + result.check_returncode() + resp = json.loads(result.stdout)["response"] except Exception as e: - raise Exception(f"{e}\n\nstderr:\n {stderr}\nstdout:\n {stdout}") + raise TypeInferenceError( + f"{e}\n\nstderr:\n {result.stderr}\nstdout:\n {result.stdout}" + ) from e + return {path: _process_pyre_data(data) for path, data in zip(paths, resp)} def __init__(self, cache: PyreData) -> None: @@ -104,13 +111,6 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): self._parse_metadata(node) -def run_command( - cmd_args: List[str], timeout: Optional[int] = None -) -> Tuple[str, str, int]: - process = subprocess.run(cmd_args, capture_output=True, timeout=timeout) - return process.stdout.decode(), process.stderr.decode(), process.returncode - - class RawPyreData(TypedDict): path: str types: Sequence[InferredType] diff --git a/libcst/tool.py b/libcst/tool.py index 2a04a172..6dc410cc 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -21,7 +21,7 @@ from typing import Any, Callable, Dict, List, Tuple, Type import yaml -from libcst import LIBCST_VERSION, parse_module, PartialParserConfig +from libcst import CSTLogicError, LIBCST_VERSION, parse_module, PartialParserConfig from libcst._parser.parso.utils import parse_version_string from libcst.codemod import ( CodemodCommand, @@ -191,7 +191,7 @@ def _find_and_load_config(proc_name: str) -> Dict[str, Any]: requires_config = bool(os.environ.get("LIBCST_TOOL_REQUIRE_CONFIG", "")) if 
requires_config and not found_config: - raise Exception( + raise FileNotFoundError( f"Did not find a {CONFIG_FILE_NAME} in current directory or any " + "parent directory! Perhaps you meant to run this command from a " + "configured subdirectory, or you need to initialize a new project " @@ -391,7 +391,7 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 # full-repo metadata since there is no path. if any(p == "-" for p in args.path): if len(args.path) > 1: - raise Exception("Cannot specify multiple paths when reading from stdin!") + raise ValueError("Cannot specify multiple paths when reading from stdin!") print("Codemodding from stdin", file=sys.stderr) oldcode = sys.stdin.read() @@ -478,7 +478,7 @@ class _ListSerializer(_SerializerBase): def _serialize_impl(self, key: str, value: object) -> str: if not isinstance(value, list): - raise Exception("Can only serialize lists!") + raise ValueError("Can only serialize lists!") if self.newlines: values = [f"- {v!r}" for v in value] return f"{key}:{os.linesep}{os.linesep.join(values)}" @@ -539,7 +539,7 @@ def _initialize_impl(proc_name: str, command_args: List[str]) -> int: # For safety, verify that it parses to the identical file. actual_config = yaml.safe_load(config_str) if actual_config != default_config: - raise Exception("Logic error, serialization is invalid!") + raise CSTLogicError("Logic error, serialization is invalid!") config_file = os.path.abspath(os.path.join(args.path, CONFIG_FILE_NAME)) with open(config_file, "w") as fp: From 5a6970a225d3544532507110b421f7c539d14eb6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jun 2025 01:54:21 -0700 Subject: [PATCH 594/632] build(deps): bump criterion from 0.5.1 to 0.6.0 in /native (#1339) Bumps [criterion](https://github.com/bheisler/criterion.rs) from 0.5.1 to 0.6.0. - [Changelog](https://github.com/bheisler/criterion.rs/blob/master/CHANGELOG.md) - [Commits](https://github.com/bheisler/criterion.rs/compare/0.5.1...0.6.0) --- updated-dependencies: - dependency-name: criterion dependency-version: 0.6.0 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 241 ++++++++------------------------------- native/libcst/Cargo.toml | 2 +- 2 files changed, 48 insertions(+), 195 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index d09b42a5..634b96ef 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -39,12 +39,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "bitflags" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" - [[package]] name = "bumpalo" version = "3.12.0" @@ -57,15 +51,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "cc" -version = "1.0.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" -dependencies = [ - "libc", -] - [[package]] name = "cfg-if" version = "1.0.0" @@ -101,18 +86,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.0" +version = "4.5.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d5f1946157a96594eb2d2c10eb7ad9a2b27518cb3000209dec700c35df9197d" +checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.4.0" +version = "4.5.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78116e32a042dd73c2901f0dc30790d20ff3447f3e3472fad359e8c3d282bcd6" +checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" dependencies = [ "anstyle", "clap_lex", @@ -120,31 +105,28 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.5.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "criterion" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679" dependencies = [ "anes", "cast", "ciborium", "clap", "criterion-plot", - "is-terminal", - "itertools 0.10.5", + "itertools 0.13.0", "num-traits", - "once_cell", "oorandom", "plotters", "rayon", "regex", "serde", - "serde_derive", "serde_json", "tinytemplate", "walkdir", @@ -213,27 +195,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" -[[package]] -name = "errno" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - 
[[package]] name = "glob" version = "0.3.0" @@ -258,12 +219,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" - [[package]] name = "indexmap" version = "2.4.0" @@ -280,17 +235,6 @@ version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" -[[package]] -name = "is-terminal" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi", - "rustix", - "windows-sys", -] - [[package]] name = "itertools" version = "0.10.5" @@ -317,19 +261,14 @@ checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" [[package]] name = "js-sys" -version = "0.3.58" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fac17f7123a73ca62df411b1bf727ccc805daa070338fda671c86dac1bdc27" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - [[package]] name = "libc" version = "0.2.149" @@ -359,16 +298,10 @@ name = "libcst_derive" version = "1.8.0" dependencies = [ "quote", - "syn 2.0.101", + "syn", "trybuild", ] -[[package]] -name = "linux-raw-sys" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" - [[package]] name = "log" version = "0.4.17" @@ -458,9 +391,9 @@ checksum = "132dca9b868d927b35b5dd728167b2dee150eb1ad686008fc71ccb298b776fca" [[package]] name = "plotters" -version = "0.3.1" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" dependencies = [ "num-traits", "plotters-backend", @@ -471,15 +404,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.2" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.1" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] @@ -545,7 +478,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.101", + "syn", ] [[package]] @@ -558,7 +491,7 @@ dependencies = [ "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.101", + "syn", ] [[package]] @@ -620,17 +553,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] -name = "rustix" -version = "0.38.19" +name = "rustversion" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys", -] +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" [[package]] name = "ryu" @@ -670,7 +596,7 @@ checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn", ] [[package]] @@ -694,17 +620,6 @@ dependencies = [ "serde", ] -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - [[package]] name = "syn" version = "2.0.101" @@ -754,7 +669,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn", ] [[package]] @@ -818,9 +733,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.1" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-width" @@ -847,34 +762,35 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.81" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c53b543413a17a202f4be280a7e5c62a1c69345f5de525ee64f8cfdbc954994" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", + "once_cell", + "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.81" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5491a68ab4500fa6b4d726bd67408630c3dbe9c4fe7bda16d5c82a1fd8c7340a" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", - "lazy_static", "log", "proc-macro2", "quote", - "syn 1.0.109", + "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.81" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c441e177922bc58f1e12c022624b6216378e5febc2f0533e41ba443d505b80aa" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -882,28 +798,31 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.81" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.81" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a89911bd99e5f3659ec4acf9c4d93b0a90fe4a2a11f15328472058edc5261be" +checksum = 
"1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "web-sys" -version = "0.3.58" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fed94beee57daf8dd7d51f2b15dc2bcde92d7a72304cdf662a4371008b71b90" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", @@ -940,72 +859,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - [[package]] name = "winnow" version = "0.6.18" diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index e9c7411c..c09abe37 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -45,7 +45,7 @@ memchr = "2.7.4" libcst_derive = { path = "../libcst_derive", version = "1.8.0" } [dev-dependencies] -criterion = { version = "0.5.1", features = ["html_reports"] } +criterion = { version = "0.6.0", features = ["html_reports"] } difference = "2.0.0" rayon = "1.10.0" itertools = "0.13.0" From 70ccffc543fc137bea916f2659502e59883126d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jun 2025 01:54:32 -0700 Subject: [PATCH 595/632] build(deps): bump itertools from 0.13.0 to 0.14.0 in /native (#1337) Bumps 
[itertools](https://github.com/rust-itertools/itertools) from 0.13.0 to 0.14.0. - [Changelog](https://github.com/rust-itertools/itertools/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-itertools/itertools/compare/v0.13.0...v0.14.0) --- updated-dependencies: - dependency-name: itertools dependency-version: 0.14.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 6 +++--- native/libcst/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 634b96ef..c80f0b94 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -246,9 +246,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" dependencies = [ "either", ] @@ -282,7 +282,7 @@ dependencies = [ "annotate-snippets", "criterion", "difference", - "itertools 0.13.0", + "itertools 0.14.0", "libcst_derive", "memchr", "paste", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index c09abe37..309eeec2 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -48,7 +48,7 @@ libcst_derive = { path = "../libcst_derive", version = "1.8.0" } criterion = { version = "0.6.0", features = ["html_reports"] } difference = "2.0.0" rayon = "1.10.0" -itertools = "0.13.0" +itertools = "0.14.0" [[bench]] name = "parser_benchmark" From b818c0c98347694d3a6b69259f7de5f318340439 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sat, 7 Jun 2025 14:06:28 +0100 Subject: [PATCH 596/632] put itertools-0.13.0 back into lockfile --- native/Cargo.lock | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/native/Cargo.lock b/native/Cargo.lock index c80f0b94..2db93c61 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -244,6 +244,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.14.0" From 9f3629e58e0bdb89efc7969aa7c9e313d6790d9c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Jun 2025 00:33:03 -0700 Subject: [PATCH 597/632] build(deps): bump pypa/cibuildwheel from 3.0.0b4 to 3.0.0rc2 (#1354) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.0.0b4 to 3.0.0rc2. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v3.0.0b4...v3.0.0rc2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-version: 3.0.0rc2 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3306be71..921e871e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: github.event_name != 'release' run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v3.0.0b4 + uses: pypa/cibuildwheel@v3.0.0rc2 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 0b1a9810ae4508710c6e3872ed9f27058a99e9ca Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 10 Jun 2025 08:23:03 +0100 Subject: [PATCH 598/632] Use poe as a task runner (#1355) Make `hatch run foo` wrap the corresponding `poe` command. --- pyproject.toml | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 27dbb710..71164512 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,6 +49,7 @@ dependencies = [ "hypothesis>=4.36.0", "hypothesmith>=0.0.4", "maturin>=1.7.0,<1.8", + "poethepoet>=0.35.0", "prompt-toolkit>=2.0.9", "pyre-check==0.9.18; platform_system != 'Windows'", "setuptools_scm>=6.0.1", @@ -58,17 +59,27 @@ dependencies = [ "slotscheck>=0.7.1", ] -[tool.hatch.envs.default.scripts] -fixtures = ["python scripts/regenerate-fixtures.py", "git diff --exit-code"] +[tool.poe.tasks] +fixtures = ["regenerate-fixtures", "_assert_no_changes"] +regenerate-fixtures = "python scripts/regenerate-fixtures.py" +_assert_no_changes = "git diff --exit-code" + format = "ufmt format libcst scripts" -lint = [ - "flake8 libcst", - "ufmt check libcst scripts", - "python -m slotscheck libcst", - "python scripts/check_copyright.py", -] -test = ["python --version", "python -m coverage run -m libcst.tests"] -typecheck = ["pyre --version", "pyre check"] +_flake8 = "flake8 libcst" +_ufmt = "ufmt check libcst scripts" +_slotscheck = "python -m slotscheck libcst" +_check_copyright = "python scripts/check_copyright.py" +lint = ["_flake8", "_ufmt", "_slotscheck", "_check_copyright"] +test = "python -m coverage run -m libcst.tests" +typecheck = "pyre check" +docs = "sphinx-build -ab html docs/source docs/build" + +[tool.hatch.envs.default.scripts] +fixtures = "poe fixtures" +format = "poe format" +lint = "poe lint" +test = "poe test" +typecheck = "poe typecheck" [tool.hatch.envs.docs] extra-dependencies = [ @@ -79,7 +90,7 @@ extra-dependencies = [ "jinja2==3.1.6", ] [tool.hatch.envs.docs.scripts] -docs = "sphinx-build -ab html docs/source docs/build" +docs = "poe docs" [tool.slotscheck] exclude-modules = '^libcst\.(testing|tests)' From db38266f1d72f819614bb9ea641ca160ba677c88 Mon Sep 17 00:00:00 2001 From: Lysandros Nikolaou Date: Tue, 10 Jun 2025 18:21:21 +0200 Subject: [PATCH 599/632] Upgrade PyYAML-ft version and use new module name (#1353) * Upgrade PyYAML-ft version and use new module name * add pyre ignore --------- Co-authored-by: Zsolt Dollenstein --- libcst/tool.py | 5 ++++- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/libcst/tool.py b/libcst/tool.py index 6dc410cc..a2164b11 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -19,7 +19,10 @@ import textwrap from abc import ABC, abstractmethod from typing import Any, Callable, Dict, List, Tuple, Type -import yaml +try: + import yaml_ft as yaml # pyre-ignore +except ModuleNotFoundError: + 
import yaml from libcst import CSTLogicError, LIBCST_VERSION, parse_module, PartialParserConfig from libcst._parser.parso.utils import parse_version_string diff --git a/pyproject.toml b/pyproject.toml index 71164512..3c497f97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ classifiers = [ requires-python = ">=3.9" dependencies = [ "pyyaml>=5.2; python_version < '3.13'", - "pyyaml-ft; python_version >= '3.13'", + "pyyaml-ft>=8.0.0; python_version >= '3.13'", ] [project.urls] From ab12c4c266457196216ea3cc0d3e0509c19b01e8 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 10 Jun 2025 17:29:03 +0100 Subject: [PATCH 600/632] bump version to 1.8.1 (#1357) --- CHANGELOG.md | 14 ++++++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 19 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb50a57a..945d056d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ +# 1.8.1 - 2025-06-10 + +## Added +* add helper to convert nodes to matchers by @zsol in https://github.com/Instagram/LibCST/pull/1351 + +## Updated +* Avoid raising bare Exception by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1168 +* Upgrade PyYAML-ft version and use new module name by @lysnikolaou in https://github.com/Instagram/LibCST/pull/1353 + +## New Contributors +* @lysnikolaou made their first contribution in https://github.com/Instagram/LibCST/pull/1353 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.0...v1.8.1 + # 1.8.0 - 2025-05-27 ## Added diff --git a/native/Cargo.lock b/native/Cargo.lock index 2db93c61..9501a525 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.8.0" +version = "1.8.1" dependencies = [ "annotate-snippets", "criterion", @@ -304,7 +304,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.0" +version = "1.8.1" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 309eeec2..2e0b7be3 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.8.0" +version = "1.8.1" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.1" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.0" } +libcst_derive = { path = "../libcst_derive", version = "1.8.1" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 78c8554f..566f74e2 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.8.0" +version = "1.8.1" edition = "2018" description = "Proc macro helpers for libcst." 
license = "MIT" From 8c35ae20efb3e1cb8fcee868bcd407beff5bd294 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Tue, 10 Jun 2025 21:58:40 +0100 Subject: [PATCH 601/632] Switch from hatch to uv (#1356) * use dependency-groups in pyproject.toml * replace `hatch run foo` with `uv run poe foo` * install uv @ 0.7.12 in CI and disable caching * use `uv run --group docs` for the `docs` command * DRY docs between CONTRIBUTING and README * tell pyre to ignore `.venv` * set up uv to rebuild on rust, pyproject.toml, git changes --- .github/workflows/ci.yml | 56 +- .github/workflows/pypi_upload.yml | 9 +- .pyre_configuration | 3 + CONTRIBUTING.md | 12 +- README.rst | 52 +- pyproject.toml | 60 +- uv.lock | 2773 +++++++++++++++++++++++++++++ 7 files changed, 2844 insertions(+), 121 deletions(-) create mode 100644 uv.lock diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 87a2ea11..ce969e24 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,35 +25,29 @@ jobs: - "3.14" - "3.14t" steps: - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - python-version: "3.10" - - name: Install hatch - run: pip install -U hatch + version: "0.7.12" + python-version: ${{ matrix.python-version }} - uses: actions/checkout@v4 with: fetch-depth: 0 persist-credentials: false - - uses: actions/setup-python@v5 - with: - cache: pip - cache-dependency-path: "pyproject.toml" - python-version: ${{ matrix.python-version }} - allow-prereleases: true - uses: dtolnay/rust-toolchain@stable - name: Build LibCST - run: hatch -vv env create + run: uv sync --locked --dev - name: Native Parser Tests - run: hatch run test + run: uv run poe test - name: Pure Parser Tests env: COVERAGE_FILE: .coverage.pure LIBCST_PARSER_TYPE: pure - run: hatch run test + run: uv run poe test - name: Coverage run: | - hatch run coverage combine .coverage.pure - hatch run coverage report + uv run coverage combine .coverage.pure + uv run coverage report # Run linters lint: @@ -63,15 +57,13 @@ jobs: with: fetch-depth: 0 persist-credentials: false - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - cache: pip - cache-dependency-path: "pyproject.toml" + version: "0.7.12" python-version: "3.10" - - name: Install hatch - run: pip install -U hatch - - run: hatch run lint - - run: hatch run fixtures + - run: uv run poe lint + - run: uv run poe fixtures # Run pyre typechecker typecheck: @@ -81,14 +73,12 @@ jobs: with: fetch-depth: 0 persist-credentials: false - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - cache: pip - cache-dependency-path: "pyproject.toml" + version: "0.7.12" python-version: "3.10" - - name: Install hatch - run: pip install -U hatch - - run: hatch run typecheck + - run: uv run poe typecheck # Build the docs docs: @@ -98,15 +88,13 @@ jobs: with: fetch-depth: 0 persist-credentials: false - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - cache: pip - cache-dependency-path: "pyproject.toml" + version: "0.7.12" python-version: "3.10" - - name: Install hatch - run: pip install -U hatch - uses: ts-graphviz/setup-graphviz@v2 - - run: hatch run docs:docs + - run: uv run --group docs poe docs - name: Archive Docs uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index e8798c6e..3a16857a 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ 
-33,14 +33,17 @@ jobs: - uses: actions/setup-python@v5 with: python-version: "3.10" - - name: Install hatch - run: pip install -U hatch + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + version: "0.7.12" + enable-cache: false - name: Build a source tarball env: LIBCST_NO_LOCAL_SCHEME: 1 OUTDIR: ${{ steps.download.outputs.download-path }} run: >- - hatch run python -m + uv run python -m build --sdist --outdir "$OUTDIR" diff --git a/.pyre_configuration b/.pyre_configuration index ae37b031..cf108076 100644 --- a/.pyre_configuration +++ b/.pyre_configuration @@ -2,6 +2,9 @@ "exclude": [ ".*\/native\/.*" ], + "ignore_all_errors": [ + ".venv" + ], "source_directories": [ "." ], diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c69049cd..2e35431d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,7 +11,7 @@ We actively welcome your pull requests. ### Setup Your Environment -1. Install a [Rust toolchain](https://rustup.rs) and [hatch](https://hatch.pypa.io) +1. Install a [Rust toolchain](https://rustup.rs) and [uv](https://docs.astral.sh/uv/) 2. Fork the repo on your side 3. Clone the repo > git clone [your fork.git] libcst @@ -19,7 +19,7 @@ We actively welcome your pull requests. 4. Sync with the main libcst version package > git fetch --tags https://github.com/instagram/libcst 5. Setup the env - > hatch env create + > uv sync You are now ready to create your own branch from main, and contribute. Please provide tests (using unittest), and update the documentation (both docstrings @@ -28,13 +28,13 @@ and sphinx doc), if applicable. ### Before Submitting Your Pull Request 1. Format your code - > hatch run format + > uv run poe format 2. Run the type checker - > hatch run typecheck + > uv run poe typecheck 3. Test your changes - > hatch run test + > uv run poe test 4. Check linters - > hatch run lint + > uv run poe lint ## Contributor License Agreement ("CLA") In order to accept your pull request, we need you to submit a CLA. You only need diff --git a/README.rst b/README.rst index 78d29820..0919ed8f 100644 --- a/README.rst +++ b/README.rst @@ -148,49 +148,7 @@ Further Reading Development ----------- -You'll need a recent `Rust toolchain `_ for developing. - -We recommend using `hatch ` for running tests, linters, -etc. - -Then, start by setting up and building the project: - -.. code-block:: shell - - git clone git@github.com:Instagram/LibCST.git libcst - cd libcst - hatch env create - -To run the project's test suite, you can: - -.. code-block:: shell - - hatch run test - -You can also run individual tests by using unittest and specifying a module like -this: - -.. code-block:: shell - - hatch run python -m unittest libcst.tests.test_batched_visitor - -See the `unittest documentation `_ -for more examples of how to run tests. - -We have multiple linters, including copyright checks and -`slotscheck `_ to check the correctness of class -``__slots__``. To run all of the linters: - -.. code-block:: shell - - hatch run lint - -We use `ufmt `_ to format code. To format -changes to be conformant, run the following in the root: - -.. code-block:: shell - - hatch run format +See `CONTRIBUTING.md `_ for more details. Building ~~~~~~~~ @@ -208,11 +166,11 @@ directory: cargo build -To rebuild the ``libcst.native`` module, from the repo root: +The ``libcst.native`` module should be rebuilt automatically, but to force it: .. 
code-block:: shell - hatch env prune && hatch env create + uv sync --reinstall-package libcst Type Checking ~~~~~~~~~~~~~ @@ -223,7 +181,7 @@ To verify types for the library, do the following in the root: .. code-block:: shell - hatch run typecheck + uv run poe typecheck Generating Documents ~~~~~~~~~~~~~~~~~~~~ @@ -232,7 +190,7 @@ To generate documents, do the following in the root: .. code-block:: shell - hatch run docs + uv run --group docs poe docs Future ====== diff --git a/pyproject.toml b/pyproject.toml index 3c497f97..b4b15320 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,19 +28,8 @@ Documentation = "https://libcst.readthedocs.io/en/latest/" Github = "https://github.com/Instagram/LibCST" Changelog = "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md" -[tool.black] -target-version = ["py39"] -extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format - -[tool.coverage.report] -fail_under = 93 -precision = 1 -show_missing = true -skip_covered = true - -[tool.hatch.envs.default] -installer = "uv" -dependencies = [ +[dependency-groups] +dev = [ "black==25.1.0", "coverage[toml]>=4.5.4", "build>=0.10.0", @@ -58,6 +47,33 @@ dependencies = [ "setuptools-rust>=1.5.2", "slotscheck>=0.7.1", ] +docs = [ + {include-group = "dev"}, + "Sphinx>=5.1.1", + "sphinx-rtd-theme>=0.4.3", + "jupyter>=1.0.0", + "nbsphinx>=0.4.2", + "jinja2==3.1.6", +] + +[tool.black] +target-version = ["py39"] +extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format + +[tool.coverage.report] +fail_under = 93 +precision = 1 +show_missing = true +skip_covered = true + +[tool.uv] +cache-keys = [ + { file = "pyproject.toml" }, + { git = {commit = true, tags = true}}, + { file = "**/*.rs"}, + { file = "**/Cargo.toml"}, + { file = "**/Cargo.lock"}, +] [tool.poe.tasks] fixtures = ["regenerate-fixtures", "_assert_no_changes"] @@ -74,24 +90,6 @@ test = "python -m coverage run -m libcst.tests" typecheck = "pyre check" docs = "sphinx-build -ab html docs/source docs/build" -[tool.hatch.envs.default.scripts] -fixtures = "poe fixtures" -format = "poe format" -lint = "poe lint" -test = "poe test" -typecheck = "poe typecheck" - -[tool.hatch.envs.docs] -extra-dependencies = [ - "Sphinx>=5.1.1", - "sphinx-rtd-theme>=0.4.3", - "jupyter>=1.0.0", - "nbsphinx>=0.4.2", - "jinja2==3.1.6", -] -[tool.hatch.envs.docs.scripts] -docs = "poe docs" - [tool.slotscheck] exclude-modules = '^libcst\.(testing|tests)' diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..ad68fefe --- /dev/null +++ b/uv.lock @@ -0,0 +1,2773 @@ +version = 1 +revision = 2 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version == '3.10.*'", + "python_full_version < '3.10'", +] + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "anyio" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, +] + +[[package]] +name = "appnope" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, +] + +[[package]] +name = "argon2-cffi" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argon2-cffi-bindings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1", size = 45706, upload-time = "2025-06-03T06:55:32.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741", size = 14657, upload-time = "2025-06-03T06:55:30.804Z" }, +] + +[[package]] +name = 
"argon2-cffi-bindings" +version = "21.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/e9/184b8ccce6683b0aa2fbb7ba5683ea4b9c5763f1356347f1312c32e3c66e/argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3", size = 1779911, upload-time = "2021-12-01T08:52:55.68Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/13/838ce2620025e9666aa8f686431f67a29052241692a3dd1ae9d3692a89d3/argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367", size = 29658, upload-time = "2021-12-01T09:09:17.016Z" }, + { url = "https://files.pythonhosted.org/packages/b3/02/f7f7bb6b6af6031edb11037639c697b912e1dea2db94d436e681aea2f495/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d", size = 80583, upload-time = "2021-12-01T09:09:19.546Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f7/378254e6dd7ae6f31fe40c8649eea7d4832a42243acaf0f1fff9083b2bed/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae", size = 86168, upload-time = "2021-12-01T09:09:21.445Z" }, + { url = "https://files.pythonhosted.org/packages/74/f6/4a34a37a98311ed73bb80efe422fed95f2ac25a4cacc5ae1d7ae6a144505/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c", size = 82709, upload-time = "2021-12-01T09:09:18.182Z" }, + { url = "https://files.pythonhosted.org/packages/74/2b/73d767bfdaab25484f7e7901379d5f8793cccbb86c6e0cbc4c1b96f63896/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86", size = 83613, upload-time = "2021-12-01T09:09:22.741Z" }, + { url = "https://files.pythonhosted.org/packages/4f/fd/37f86deef67ff57c76f137a67181949c2d408077e2e3dd70c6c42912c9bf/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f", size = 84583, upload-time = "2021-12-01T09:09:24.177Z" }, + { url = "https://files.pythonhosted.org/packages/6f/52/5a60085a3dae8fded8327a4f564223029f5f54b0cb0455a31131b5363a01/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e", size = 88475, upload-time = "2021-12-01T09:09:26.673Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/143cd64feb24a15fa4b189a3e1e7efbaeeb00f39a51e99b26fc62fbacabd/argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082", size = 27698, upload-time = "2021-12-01T09:09:27.87Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/e34e47c7dee97ba6f01a6203e0383e15b60fb85d78ac9a15cd066f6fe28b/argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f", size = 30817, upload-time = "2021-12-01T09:09:30.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/e4/bf8034d25edaa495da3c8a3405627d2e35758e44ff6eaa7948092646fdcc/argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93", size = 53104, upload-time = "2021-12-01T09:09:31.335Z" }, +] + +[[package]] +name = "arrow" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "types-python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960, upload-time = "2023-09-30T22:11:18.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419, upload-time = "2023-09-30T22:11:16.072Z" }, +] + +[[package]] +name = "asttokens" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" }, +] + +[[package]] +name = "async-lru" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = 
"sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.13.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, +] + +[[package]] +name = "black" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" }, + { url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" }, + { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" }, + { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, + { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" }, + { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, + { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, + { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, + { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, + { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, + { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, + { url = "https://files.pythonhosted.org/packages/d3/b6/ae7507470a4830dbbfe875c701e84a4a5fb9183d1497834871a715716a92/black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", size = 1628593, upload-time = "2025-01-29T05:37:23.672Z" }, + { url = "https://files.pythonhosted.org/packages/24/c1/ae36fa59a59f9363017ed397750a0cd79a470490860bc7713967d89cdd31/black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f", size = 1460000, upload-time = "2025-01-29T05:37:25.829Z" }, + { url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e9/2cb0a017eb7024f70e0d2e9bdb8c5a5b078c5740c7f8816065d06f04c557/black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", size = 1419419, upload-time = "2025-01-29T04:18:30.191Z" }, + { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, +] + +[[package]] +name = "bleach" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" }, +] + +[package.optional-dependencies] +css = [ + { name = "tinycss2" }, +] + +[[package]] +name = "build" +version = "1.2.2.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "os_name == 'nt'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10.2'" }, + { name = "packaging" }, + { name = "pyproject-hooks" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/46/aeab111f8e06793e4f0e421fcad593d547fb8313b50990f31681ee2fb1ad/build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7", size = 46701, upload-time = "2024-10-06T17:22:25.251Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/c2/80633736cd183ee4a62107413def345f7e6e3c01563dbca1417363cf957e/build-1.2.2.post1-py3-none-any.whl", hash = 
"sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5", size = 22950, upload-time = "2024-10-06T17:22:23.299Z" }, +] + +[[package]] +name = "certifi" +version = "2025.4.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 
446211, upload-time = "2024-09-04T20:43:41.526Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, 
+ { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220, upload-time = "2024-09-04T20:45:01.577Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605, upload-time = "2024-09-04T20:45:03.837Z" }, + { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910, upload-time = "2024-09-04T20:45:05.315Z" }, + { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200, upload-time = "2024-09-04T20:45:06.903Z" }, + { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565, upload-time = "2024-09-04T20:45:08.975Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635, upload-time = "2024-09-04T20:45:10.64Z" }, + { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218, upload-time = "2024-09-04T20:45:12.366Z" }, + { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486, upload-time = "2024-09-04T20:45:13.935Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911, upload-time = "2024-09-04T20:45:15.696Z" }, + { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632, upload-time = "2024-09-04T20:45:17.284Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820, upload-time = "2024-09-04T20:45:18.762Z" }, + { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290, upload-time = "2024-09-04T20:45:20.226Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 
155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", size = 201671, upload-time = "2025-05-02T08:34:12.696Z" }, + { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744, upload-time = "2025-05-02T08:34:14.665Z" }, + { url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993, upload-time = "2025-05-02T08:34:17.134Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382, upload-time = 
"2025-05-02T08:34:19.081Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536, upload-time = "2025-05-02T08:34:21.073Z" }, + { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349, upload-time = "2025-05-02T08:34:23.193Z" }, + { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365, upload-time = "2025-05-02T08:34:25.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499, upload-time = "2025-05-02T08:34:27.359Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735, upload-time = "2025-05-02T08:34:29.798Z" }, + { url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786, upload-time = "2025-05-02T08:34:31.858Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203, upload-time = "2025-05-02T08:34:33.88Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", size = 98436, upload-time = "2025-05-02T08:34:35.907Z" }, + { url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", size = 105772, upload-time = "2025-05-02T08:34:37.935Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, +] +sdist 
= { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "comm" +version = "0.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210, upload-time = "2024-03-12T16:53:41.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180, upload-time = "2024-03-12T16:53:39.226Z" }, +] + +[[package]] +name = "coverage" +version = "7.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/07/998afa4a0ecdf9b1981ae05415dad2d4e7716e1b1f00abbd91691ac09ac9/coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", size = 812759, upload-time = "2025-05-23T11:39:57.856Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/26/6b/7dd06399a5c0b81007e3a6af0395cd60e6a30f959f8d407d3ee04642e896/coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a", size = 211573, upload-time = "2025-05-23T11:37:47.207Z" }, + { url = "https://files.pythonhosted.org/packages/f0/df/2b24090820a0bac1412955fb1a4dade6bc3b8dcef7b899c277ffaf16916d/coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be", size = 212006, upload-time = "2025-05-23T11:37:50.289Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c4/e4e3b998e116625562a872a342419652fa6ca73f464d9faf9f52f1aff427/coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3", size = 241128, upload-time = "2025-05-23T11:37:52.229Z" }, + { url = "https://files.pythonhosted.org/packages/b1/67/b28904afea3e87a895da850ba587439a61699bf4b73d04d0dfd99bbd33b4/coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6", size = 239026, upload-time = "2025-05-23T11:37:53.846Z" }, + { url = "https://files.pythonhosted.org/packages/8c/0f/47bf7c5630d81bc2cd52b9e13043685dbb7c79372a7f5857279cc442b37c/coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622", size = 240172, upload-time = "2025-05-23T11:37:55.711Z" }, + { url = "https://files.pythonhosted.org/packages/ba/38/af3eb9d36d85abc881f5aaecf8209383dbe0fa4cac2d804c55d05c51cb04/coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c", size = 240086, upload-time = "2025-05-23T11:37:57.724Z" }, + { url = "https://files.pythonhosted.org/packages/9e/64/c40c27c2573adeba0fe16faf39a8aa57368a1f2148865d6bb24c67eadb41/coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3", size = 238792, upload-time = "2025-05-23T11:37:59.737Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ab/b7c85146f15457671c1412afca7c25a5696d7625e7158002aa017e2d7e3c/coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404", size = 239096, upload-time = "2025-05-23T11:38:01.693Z" }, + { url = "https://files.pythonhosted.org/packages/d3/50/9446dad1310905fb1dc284d60d4320a5b25d4e3e33f9ea08b8d36e244e23/coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7", size = 214144, upload-time = "2025-05-23T11:38:03.68Z" }, + { url = "https://files.pythonhosted.org/packages/23/ed/792e66ad7b8b0df757db8d47af0c23659cdb5a65ef7ace8b111cacdbee89/coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347", size = 215043, upload-time = "2025-05-23T11:38:05.217Z" }, + { url = "https://files.pythonhosted.org/packages/6a/4d/1ff618ee9f134d0de5cc1661582c21a65e06823f41caf801aadf18811a8e/coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9", size = 211692, upload-time = "2025-05-23T11:38:08.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/fa/c3c1b476de96f2bc7a8ca01a9f1fcb51c01c6b60a9d2c3e66194b2bdb4af/coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879", size = 212115, upload-time = "2025-05-23T11:38:09.989Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c2/5414c5a1b286c0f3881ae5adb49be1854ac5b7e99011501f81c8c1453065/coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a", size = 244740, upload-time = "2025-05-23T11:38:11.947Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/1ae01912dfb06a642ef3dd9cf38ed4996fda8fe884dab8952da616f81a2b/coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5", size = 242429, upload-time = "2025-05-23T11:38:13.955Z" }, + { url = "https://files.pythonhosted.org/packages/06/58/38c676aec594bfe2a87c7683942e5a30224791d8df99bcc8439fde140377/coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11", size = 244218, upload-time = "2025-05-23T11:38:15.631Z" }, + { url = "https://files.pythonhosted.org/packages/80/0c/95b1023e881ce45006d9abc250f76c6cdab7134a1c182d9713878dfefcb2/coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a", size = 243865, upload-time = "2025-05-23T11:38:17.622Z" }, + { url = "https://files.pythonhosted.org/packages/57/37/0ae95989285a39e0839c959fe854a3ae46c06610439350d1ab860bf020ac/coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb", size = 242038, upload-time = "2025-05-23T11:38:19.966Z" }, + { url = "https://files.pythonhosted.org/packages/4d/82/40e55f7c0eb5e97cc62cbd9d0746fd24e8caf57be5a408b87529416e0c70/coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54", size = 242567, upload-time = "2025-05-23T11:38:21.912Z" }, + { url = "https://files.pythonhosted.org/packages/f9/35/66a51adc273433a253989f0d9cc7aa6bcdb4855382cf0858200afe578861/coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a", size = 214194, upload-time = "2025-05-23T11:38:23.571Z" }, + { url = "https://files.pythonhosted.org/packages/f6/8f/a543121f9f5f150eae092b08428cb4e6b6d2d134152c3357b77659d2a605/coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975", size = 215109, upload-time = "2025-05-23T11:38:25.137Z" }, + { url = "https://files.pythonhosted.org/packages/77/65/6cc84b68d4f35186463cd7ab1da1169e9abb59870c0f6a57ea6aba95f861/coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53", size = 213521, upload-time = "2025-05-23T11:38:27.123Z" }, + { url = "https://files.pythonhosted.org/packages/8d/2a/1da1ada2e3044fcd4a3254fb3576e160b8fe5b36d705c8a31f793423f763/coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", size = 211876, upload-time = "2025-05-23T11:38:29.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/e9/3d715ffd5b6b17a8be80cd14a8917a002530a99943cc1939ad5bb2aa74b9/coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", size = 212130, upload-time = "2025-05-23T11:38:30.675Z" }, + { url = "https://files.pythonhosted.org/packages/a0/02/fdce62bb3c21649abfd91fbdcf041fb99be0d728ff00f3f9d54d97ed683e/coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", size = 246176, upload-time = "2025-05-23T11:38:32.395Z" }, + { url = "https://files.pythonhosted.org/packages/a7/52/decbbed61e03b6ffe85cd0fea360a5e04a5a98a7423f292aae62423b8557/coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", size = 243068, upload-time = "2025-05-23T11:38:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/38/6c/d0e9c0cce18faef79a52778219a3c6ee8e336437da8eddd4ab3dbd8fadff/coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", size = 245328, upload-time = "2025-05-23T11:38:35.568Z" }, + { url = "https://files.pythonhosted.org/packages/f0/70/f703b553a2f6b6c70568c7e398ed0789d47f953d67fbba36a327714a7bca/coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", size = 245099, upload-time = "2025-05-23T11:38:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fb/4cbb370dedae78460c3aacbdad9d249e853f3bc4ce5ff0e02b1983d03044/coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", size = 243314, upload-time = "2025-05-23T11:38:39.238Z" }, + { url = "https://files.pythonhosted.org/packages/39/9f/1afbb2cb9c8699b8bc38afdce00a3b4644904e6a38c7bf9005386c9305ec/coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", size = 244489, upload-time = "2025-05-23T11:38:40.845Z" }, + { url = "https://files.pythonhosted.org/packages/79/fa/f3e7ec7d220bff14aba7a4786ae47043770cbdceeea1803083059c878837/coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", size = 214366, upload-time = "2025-05-23T11:38:43.551Z" }, + { url = "https://files.pythonhosted.org/packages/54/aa/9cbeade19b7e8e853e7ffc261df885d66bf3a782c71cba06c17df271f9e6/coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", size = 215165, upload-time = "2025-05-23T11:38:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/c4/73/e2528bf1237d2448f882bbebaec5c3500ef07301816c5c63464b9da4d88a/coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", size = 213548, upload-time = "2025-05-23T11:38:46.74Z" }, + { url = "https://files.pythonhosted.org/packages/1a/93/eb6400a745ad3b265bac36e8077fdffcf0268bdbbb6c02b7220b624c9b31/coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", size = 211898, upload-time = "2025-05-23T11:38:49.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/7c/bdbf113f92683024406a1cd226a199e4200a2001fc85d6a6e7e299e60253/coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", size = 212171, upload-time = "2025-05-23T11:38:51.207Z" }, + { url = "https://files.pythonhosted.org/packages/91/22/594513f9541a6b88eb0dba4d5da7d71596dadef6b17a12dc2c0e859818a9/coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", size = 245564, upload-time = "2025-05-23T11:38:52.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f4/2860fd6abeebd9f2efcfe0fd376226938f22afc80c1943f363cd3c28421f/coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", size = 242719, upload-time = "2025-05-23T11:38:54.529Z" }, + { url = "https://files.pythonhosted.org/packages/89/60/f5f50f61b6332451520e6cdc2401700c48310c64bc2dd34027a47d6ab4ca/coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", size = 244634, upload-time = "2025-05-23T11:38:57.326Z" }, + { url = "https://files.pythonhosted.org/packages/3b/70/7f4e919039ab7d944276c446b603eea84da29ebcf20984fb1fdf6e602028/coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", size = 244824, upload-time = "2025-05-23T11:38:59.421Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/36297a4c0cea4de2b2c442fe32f60c3991056c59cdc3cdd5346fbb995c97/coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", size = 242872, upload-time = "2025-05-23T11:39:01.049Z" }, + { url = "https://files.pythonhosted.org/packages/a4/71/e041f1b9420f7b786b1367fa2a375703889ef376e0d48de9f5723fb35f11/coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", size = 244179, upload-time = "2025-05-23T11:39:02.709Z" }, + { url = "https://files.pythonhosted.org/packages/bd/db/3c2bf49bdc9de76acf2491fc03130c4ffc51469ce2f6889d2640eb563d77/coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", size = 214393, upload-time = "2025-05-23T11:39:05.457Z" }, + { url = "https://files.pythonhosted.org/packages/c6/dc/947e75d47ebbb4b02d8babb1fad4ad381410d5bc9da7cfca80b7565ef401/coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", size = 215194, upload-time = "2025-05-23T11:39:07.171Z" }, + { url = "https://files.pythonhosted.org/packages/90/31/a980f7df8a37eaf0dc60f932507fda9656b3a03f0abf188474a0ea188d6d/coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", size = 213580, upload-time = "2025-05-23T11:39:08.862Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6a/25a37dd90f6c95f59355629417ebcb74e1c34e38bb1eddf6ca9b38b0fc53/coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", size = 212734, upload-time = "2025-05-23T11:39:11.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/8b/3a728b3118988725f40950931abb09cd7f43b3c740f4640a59f1db60e372/coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", size = 212959, upload-time = "2025-05-23T11:39:12.751Z" }, + { url = "https://files.pythonhosted.org/packages/53/3c/212d94e6add3a3c3f412d664aee452045ca17a066def8b9421673e9482c4/coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", size = 257024, upload-time = "2025-05-23T11:39:15.569Z" }, + { url = "https://files.pythonhosted.org/packages/a4/40/afc03f0883b1e51bbe804707aae62e29c4e8c8bbc365c75e3e4ddeee9ead/coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", size = 252867, upload-time = "2025-05-23T11:39:17.64Z" }, + { url = "https://files.pythonhosted.org/packages/18/a2/3699190e927b9439c6ded4998941a3c1d6fa99e14cb28d8536729537e307/coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", size = 255096, upload-time = "2025-05-23T11:39:19.328Z" }, + { url = "https://files.pythonhosted.org/packages/b4/06/16e3598b9466456b718eb3e789457d1a5b8bfb22e23b6e8bbc307df5daf0/coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", size = 256276, upload-time = "2025-05-23T11:39:21.077Z" }, + { url = "https://files.pythonhosted.org/packages/a7/d5/4b5a120d5d0223050a53d2783c049c311eea1709fa9de12d1c358e18b707/coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", size = 254478, upload-time = "2025-05-23T11:39:22.838Z" }, + { url = "https://files.pythonhosted.org/packages/ba/85/f9ecdb910ecdb282b121bfcaa32fa8ee8cbd7699f83330ee13ff9bbf1a85/coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", size = 255255, upload-time = "2025-05-23T11:39:24.644Z" }, + { url = "https://files.pythonhosted.org/packages/50/63/2d624ac7d7ccd4ebbd3c6a9eba9d7fc4491a1226071360d59dd84928ccb2/coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", size = 215109, upload-time = "2025-05-23T11:39:26.722Z" }, + { url = "https://files.pythonhosted.org/packages/22/5e/7053b71462e970e869111c1853afd642212568a350eba796deefdfbd0770/coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", size = 216268, upload-time = "2025-05-23T11:39:28.429Z" }, + { url = "https://files.pythonhosted.org/packages/07/69/afa41aa34147655543dbe96994f8a246daf94b361ccf5edfd5df62ce066a/coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", size = 214071, upload-time = "2025-05-23T11:39:30.55Z" }, + { url = "https://files.pythonhosted.org/packages/71/1e/388267ad9c6aa126438acc1ceafede3bb746afa9872e3ec5f0691b7d5efa/coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a", size = 211566, upload-time = "2025-05-23T11:39:32.333Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/a5/acc03e5cf0bba6357f5e7c676343de40fbf431bb1e115fbebf24b2f7f65e/coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d", size = 211996, upload-time = "2025-05-23T11:39:34.512Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a2/0fc0a9f6b7c24fa4f1d7210d782c38cb0d5e692666c36eaeae9a441b6755/coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca", size = 240741, upload-time = "2025-05-23T11:39:36.252Z" }, + { url = "https://files.pythonhosted.org/packages/e6/da/1c6ba2cf259710eed8916d4fd201dccc6be7380ad2b3b9f63ece3285d809/coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d", size = 238672, upload-time = "2025-05-23T11:39:38.03Z" }, + { url = "https://files.pythonhosted.org/packages/ac/51/c8fae0dc3ca421e6e2509503696f910ff333258db672800c3bdef256265a/coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787", size = 239769, upload-time = "2025-05-23T11:39:40.24Z" }, + { url = "https://files.pythonhosted.org/packages/59/8e/b97042ae92c59f40be0c989df090027377ba53f2d6cef73c9ca7685c26a6/coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7", size = 239555, upload-time = "2025-05-23T11:39:42.3Z" }, + { url = "https://files.pythonhosted.org/packages/47/35/b8893e682d6e96b1db2af5997fc13ef62219426fb17259d6844c693c5e00/coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3", size = 237768, upload-time = "2025-05-23T11:39:44.069Z" }, + { url = "https://files.pythonhosted.org/packages/03/6c/023b0b9a764cb52d6243a4591dcb53c4caf4d7340445113a1f452bb80591/coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7", size = 238757, upload-time = "2025-05-23T11:39:46.195Z" }, + { url = "https://files.pythonhosted.org/packages/03/ed/3af7e4d721bd61a8df7de6de9e8a4271e67f3d9e086454558fd9f48eb4f6/coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a", size = 214166, upload-time = "2025-05-23T11:39:47.934Z" }, + { url = "https://files.pythonhosted.org/packages/9d/30/ee774b626773750dc6128354884652507df3c59d6aa8431526107e595227/coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e", size = 215050, upload-time = "2025-05-23T11:39:50.252Z" }, + { url = "https://files.pythonhosted.org/packages/69/2f/572b29496d8234e4a7773200dd835a0d32d9e171f2d974f3fe04a9dbc271/coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837", size = 203636, upload-time = "2025-05-23T11:39:52.002Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = 
"tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow" }, + { name = "typing-inspect" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, +] + +[[package]] +name = "debugpy" +version = "1.8.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/75/087fe07d40f490a78782ff3b0a30e3968936854105487decdb33446d4b0e/debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322", size = 1641444, upload-time = "2025-04-10T19:46:10.981Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/df/156df75a41aaebd97cee9d3870fe68f8001b6c1c4ca023e221cfce69bece/debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339", size = 2076510, upload-time = "2025-04-10T19:46:13.315Z" }, + { url = "https://files.pythonhosted.org/packages/69/cd/4fc391607bca0996db5f3658762106e3d2427beaef9bfd363fd370a3c054/debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79", size = 3559614, upload-time = "2025-04-10T19:46:14.647Z" }, + { url = "https://files.pythonhosted.org/packages/1a/42/4e6d2b9d63e002db79edfd0cb5656f1c403958915e0e73ab3e9220012eec/debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987", size = 5208588, upload-time = "2025-04-10T19:46:16.233Z" }, + { url = "https://files.pythonhosted.org/packages/97/b1/cc9e4e5faadc9d00df1a64a3c2d5c5f4b9df28196c39ada06361c5141f89/debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = "sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84", size = 5241043, upload-time = "2025-04-10T19:46:17.768Z" }, + { url = "https://files.pythonhosted.org/packages/67/e8/57fe0c86915671fd6a3d2d8746e40485fd55e8d9e682388fbb3a3d42b86f/debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9", size = 2175064, upload-time = "2025-04-10T19:46:19.486Z" }, + { url = "https://files.pythonhosted.org/packages/3b/97/2b2fd1b1c9569c6764ccdb650a6f752e4ac31be465049563c9eb127a8487/debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2", size = 3132359, upload-time = "2025-04-10T19:46:21.192Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ee/b825c87ed06256ee2a7ed8bab8fb3bb5851293bf9465409fdffc6261c426/debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2", size = 5133269, upload-time = "2025-04-10T19:46:23.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/a6/6c70cd15afa43d37839d60f324213843174c1d1e6bb616bd89f7c1341bac/debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01", size = 5158156, upload-time = "2025-04-10T19:46:24.521Z" }, + { url = "https://files.pythonhosted.org/packages/d9/2a/ac2df0eda4898f29c46eb6713a5148e6f8b2b389c8ec9e425a4a1d67bf07/debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84", size = 2501268, upload-time = "2025-04-10T19:46:26.044Z" }, + { url = "https://files.pythonhosted.org/packages/10/53/0a0cb5d79dd9f7039169f8bf94a144ad3efa52cc519940b3b7dde23bcb89/debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826", size = 4221077, upload-time = "2025-04-10T19:46:27.464Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d5/84e01821f362327bf4828728aa31e907a2eca7c78cd7c6ec062780d249f8/debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f", size = 5255127, upload-time = "2025-04-10T19:46:29.467Z" }, + { url = "https://files.pythonhosted.org/packages/33/16/1ed929d812c758295cac7f9cf3dab5c73439c83d9091f2d91871e648093e/debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f", size = 5297249, upload-time = "2025-04-10T19:46:31.538Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e4/395c792b243f2367d84202dc33689aa3d910fb9826a7491ba20fc9e261f5/debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f", size = 2485676, upload-time = "2025-04-10T19:46:32.96Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f1/6f2ee3f991327ad9e4c2f8b82611a467052a0fb0e247390192580e89f7ff/debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15", size = 4217514, upload-time = "2025-04-10T19:46:34.336Z" }, + { url = "https://files.pythonhosted.org/packages/79/28/b9d146f8f2dc535c236ee09ad3e5ac899adb39d7a19b49f03ac95d216beb/debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e", size = 5254756, upload-time = "2025-04-10T19:46:36.199Z" }, + { url = "https://files.pythonhosted.org/packages/e0/62/a7b4a57013eac4ccaef6977966e6bec5c63906dd25a86e35f155952e29a1/debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e", size = 5297119, upload-time = "2025-04-10T19:46:38.141Z" }, + { url = "https://files.pythonhosted.org/packages/85/6f/96ba96545f55b6a675afa08c96b42810de9b18c7ad17446bbec82762127a/debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f", size = 2077696, upload-time = "2025-04-10T19:46:46.817Z" }, + { url = "https://files.pythonhosted.org/packages/fa/84/f378a2dd837d94de3c85bca14f1db79f8fcad7e20b108b40d59da56a6d22/debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea", size = 3554846, upload-time = 
"2025-04-10T19:46:48.72Z" }, + { url = "https://files.pythonhosted.org/packages/db/52/88824fe5d6893f59933f664c6e12783749ab537a2101baf5c713164d8aa2/debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d", size = 5209350, upload-time = "2025-04-10T19:46:50.284Z" }, + { url = "https://files.pythonhosted.org/packages/41/35/72e9399be24a04cb72cfe1284572c9fcd1d742c7fa23786925c18fa54ad8/debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123", size = 5241852, upload-time = "2025-04-10T19:46:52.022Z" }, + { url = "https://files.pythonhosted.org/packages/97/1a/481f33c37ee3ac8040d3d51fc4c4e4e7e61cb08b8bc8971d6032acc2279f/debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20", size = 5256230, upload-time = "2025-04-10T19:46:54.077Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "executing" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693, upload-time = "2025-01-22T15:41:29.403Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702, upload-time = "2025-01-22T15:41:25.929Z" }, +] + +[[package]] +name = "fastjsonschema" +version = "2.21.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/50/4b769ce1ac4071a1ef6d86b1a3fb56cdc3a37615e8c5519e1af96cdac366/fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4", size = 373939, upload-time = "2024-12-02T10:55:15.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924, upload-time = "2024-12-02T10:55:07.599Z" }, +] + +[[package]] +name = "fixit" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "libcst" }, + { name = "moreorless" }, + { name = "packaging" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "trailrunner" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/22/fc513f039c17024fde3fe2ebe3bc93e4972f7717694613b1bc109068bfc1/fixit-2.1.0.tar.gz", hash = "sha256:b31665cb6491d659d8dfef5a6078a7e9f786e299826636d03d6bd91b6f71e95b", size = 219817, upload-time = "2023-10-26T02:37:14.329Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/91/635a1d52f36a546449031c63e54220c8a71e898bcd9cbccfe1181fc1812c/fixit-2.1.0-py3-none-any.whl", hash = "sha256:76b286c0abb9d6a63e5c7d1b6673a041c4356e93d70472e94a9ad2c447da7753", size = 83583, upload-time = "2023-10-26T02:37:12.574Z" }, +] + +[[package]] +name = "flake8" +version = "7.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/c4/5842fc9fc94584c455543540af62fd9900faade32511fab650e9891ec225/flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426", size = 48177, upload-time = "2025-03-29T20:08:39.329Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/5c/0627be4c9976d56b1217cb5187b7504e7fd7d3503f8bfd312a04077bd4f7/flake8-7.2.0-py2.py3-none-any.whl", hash = 
"sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343", size = 57786, upload-time = "2025-03-29T20:08:37.902Z" }, +] + +[[package]] +name = "fqdn" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015, upload-time = "2021-03-11T07:16:29.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121, upload-time = "2021-03-11T07:16:28.351Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "hypothesis" +version = "6.135.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/59/7022ef95715701cd90ac0cf04582e3507492ab200f370fd7ef12d80dda75/hypothesis-6.135.4.tar.gz", hash = 
"sha256:c63f6fc56840558c5c5e2441dd91fad1709da60bde756b816d4b89944e50a52f", size = 451895, upload-time = "2025-06-09T02:31:38.766Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/d4/25b3a9f35199eb1904967ca3e6db4afd636911fa39695760b0afac84f38a/hypothesis-6.135.4-py3-none-any.whl", hash = "sha256:6a3b13ce35d43e14aaf6a6ca4cc411e5342be5d05b77977499d07cf6a61e6e71", size = 517950, upload-time = "2025-06-09T02:31:34.463Z" }, +] + +[package.optional-dependencies] +lark = [ + { name = "lark" }, +] + +[[package]] +name = "hypothesmith" +version = "0.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hypothesis", extra = ["lark"] }, + { name = "libcst" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/f6/1a64114dee6c46985482c35bdbc12025db59973a0225eec47ac4d306030f/hypothesmith-0.3.3.tar.gz", hash = "sha256:96c14802d6c8e85d8975264176878db54b28d2ed921fdbfedc2e6b8ce3c81716", size = 25529, upload-time = "2024-02-16T20:21:24.511Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/bc/78dcf42c6eaaf7d628f061f1e533a596f5bca2a53be2b714adc5d370d48e/hypothesmith-0.3.3-py3-none-any.whl", hash = "sha256:fdb0172f9de97d09450da40da7da083fdd118bcd2f88b1a2289413d2d496b1b1", size = 19247, upload-time = "2024-02-16T20:20:47.059Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = 
"intervaltree" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/fb/396d568039d21344639db96d940d40eb62befe704ef849b27949ded5c3bb/intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d", size = 32861, upload-time = "2020-08-03T08:01:11.392Z" } + +[[package]] +name = "ipykernel" +version = "6.29.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appnope", marker = "sys_platform == 'darwin'" }, + { name = "comm" }, + { name = "debugpy" }, + { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "matplotlib-inline" }, + { name = "nest-asyncio" }, + { name = "packaging" }, + { name = "psutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367, upload-time = "2024-07-01T14:07:22.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173, upload-time = "2024-07-01T14:07:19.603Z" }, +] + +[[package]] +name = "ipython" +version = "8.18.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "decorator", marker = "python_full_version < '3.10'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.10'" }, + { name = "jedi", marker = "python_full_version < '3.10'" }, + { name = "matplotlib-inline", marker = "python_full_version < '3.10'" }, + { name = "pexpect", marker = "python_full_version < '3.10' and sys_platform != 'win32'" }, + { name = "prompt-toolkit", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "stack-data", marker = "python_full_version < '3.10'" }, + { name = "traitlets", marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/b9/3ba6c45a6df813c09a48bac313c22ff83efa26cbb55011218d925a46e2ad/ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27", size = 5486330, upload-time = "2023-11-27T09:58:34.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/6b/d9fdcdef2eb6a23f391251fde8781c38d42acd82abe84d054cb74f7863b0/ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397", size = 808161, upload-time = "2023-11-27T09:58:30.538Z" }, +] + +[[package]] +name = "ipython" 
+version = "8.37.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, + { name = "decorator", marker = "python_full_version == '3.10.*'" }, + { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, + { name = "jedi", marker = "python_full_version == '3.10.*'" }, + { name = "matplotlib-inline", marker = "python_full_version == '3.10.*'" }, + { name = "pexpect", marker = "python_full_version == '3.10.*' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit", marker = "python_full_version == '3.10.*'" }, + { name = "pygments", marker = "python_full_version == '3.10.*'" }, + { name = "stack-data", marker = "python_full_version == '3.10.*'" }, + { name = "traitlets", marker = "python_full_version == '3.10.*'" }, + { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/31/10ac88f3357fc276dc8a64e8880c82e80e7459326ae1d0a211b40abf6665/ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216", size = 5606088, upload-time = "2025-05-31T16:39:09.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/d0/274fbf7b0b12643cbbc001ce13e6a5b1607ac4929d1b11c72460152c9fc3/ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2", size = 831864, upload-time = "2025-05-31T16:39:06.38Z" }, +] + +[[package]] +name = "ipython" +version = "9.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, + { name = "decorator", marker = "python_full_version >= '3.11'" }, + { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.11'" }, + { name = "jedi", marker = "python_full_version >= '3.11'" }, + { name = "matplotlib-inline", marker = "python_full_version >= '3.11'" }, + { name = "pexpect", marker = "python_full_version >= '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" }, + { name = "stack-data", marker = "python_full_version >= '3.11'" }, + { name = "traitlets", marker = "python_full_version >= '3.11'" }, + { name = "typing-extensions", marker = "python_full_version == '3.11.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/09/4c7e06b96fbd203e06567b60fb41b06db606b6a82db6db7b2c85bb72a15c/ipython-9.3.0.tar.gz", hash = "sha256:79eb896f9f23f50ad16c3bc205f686f6e030ad246cc309c6279a242b14afe9d8", size = 4426460, upload-time = "2025-05-31T16:34:55.678Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/99/9ed3d52d00f1846679e3aa12e2326ac7044b5e7f90dc822b60115fa533ca/ipython-9.3.0-py3-none-any.whl", hash = "sha256:1a0b6dd9221a1f5dddf725b57ac0cb6fddc7b5f470576231ae9162b9b3455a04", size = 605320, upload-time = "2025-05-31T16:34:52.154Z" }, +] + +[[package]] +name = "ipython-pygments-lexers" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments", marker = "python_full_version >= '3.11'" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, +] + +[[package]] +name = "ipywidgets" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "comm" }, + { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jupyterlab-widgets" }, + { name = "traitlets" }, + { name = "widgetsnbextension" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3e/48/d3dbac45c2814cb73812f98dd6b38bbcc957a4e7bb31d6ea9c03bf94ed87/ipywidgets-8.1.7.tar.gz", hash = "sha256:15f1ac050b9ccbefd45dccfbb2ef6bed0029d8278682d569d71b8dd96bee0376", size = 116721, upload-time = "2025-05-05T12:42:03.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/6a/9166369a2f092bd286d24e6307de555d63616e8ddb373ebad2b5635ca4cd/ipywidgets-8.1.7-py3-none-any.whl", hash = "sha256:764f2602d25471c213919b8a1997df04bef869251db4ca8efba1b76b1bd9f7bb", size = 139806, upload-time = "2025-05-05T12:41:56.833Z" }, +] + +[[package]] +name = "isoduration" +version = "20.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "arrow" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649, upload-time = "2020-11-01T11:00:00.312Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321, upload-time = "2020-11-01T10:59:58.02Z" }, +] + +[[package]] +name = "jedi" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "parso" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "json5" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/be/c6c745ec4c4539b25a278b70e29793f10382947df0d9efba2fa09120895d/json5-0.12.0.tar.gz", hash = "sha256:0b4b6ff56801a1c7dc817b0241bca4ce474a0e6a163bfef3fc594d3fd263ff3a", size = 51907, upload-time = "2025-04-03T16:33:13.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/9f/3500910d5a98549e3098807493851eeef2b89cdd3032227558a104dfe926/json5-0.12.0-py3-none-any.whl", hash = "sha256:6d37aa6c08b0609f16e1ec5ff94697e2cbbfbad5ac112afa05794da9ab7810db", size = 36079, upload-time = "2025-04-03T16:33:11.927Z" }, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, +] + +[package.optional-dependencies] +format-nongpl = [ + { name = "fqdn" }, + { name = "idna" }, + { name = "isoduration" }, + { name = "jsonpointer" }, + { name = "rfc3339-validator" }, + { name = "rfc3986-validator" }, + { name = "uri-template" }, + { name = "webcolors" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = 
"2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + +[[package]] +name = "jupyter" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ipykernel" }, + { name = "ipywidgets" }, + { name = "jupyter-console" }, + { name = "jupyterlab" }, + { name = "nbconvert" }, + { name = "notebook" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959, upload-time = "2024-08-30T07:15:48.299Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657, upload-time = "2024-08-30T07:15:47.045Z" }, +] + +[[package]] +name = "jupyter-client" +version = "8.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jupyter-core" }, + { name = "python-dateutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, +] + +[[package]] +name = "jupyter-console" +version = "6.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ipykernel" }, + { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "prompt-toolkit" }, + { name = "pygments" }, + { name = "pyzmq" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363, upload-time = "2023-03-06T14:13:31.02Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510, upload-time = "2023-03-06T14:13:28.229Z" }, +] + +[[package]] +name = 
"jupyter-core" +version = "5.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "platformdirs" }, + { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941", size = 88923, upload-time = "2025-05-27T07:38:16.655Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0", size = 28880, upload-time = "2025-05-27T07:38:15.137Z" }, +] + +[[package]] +name = "jupyter-events" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonschema", extra = ["format-nongpl"] }, + { name = "packaging" }, + { name = "python-json-logger" }, + { name = "pyyaml" }, + { name = "referencing" }, + { name = "rfc3339-validator" }, + { name = "rfc3986-validator" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/c3/306d090461e4cf3cd91eceaff84bede12a8e52cd821c2d20c9a4fd728385/jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b", size = 62196, upload-time = "2025-02-03T17:23:41.485Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/48/577993f1f99c552f18a0428731a755e06171f9902fa118c379eb7c04ea22/jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb", size = 19430, upload-time = "2025-02-03T17:23:38.643Z" }, +] + +[[package]] +name = "jupyter-lsp" +version = "2.2.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jupyter-server" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/b4/3200b0b09c12bc3b72d943d923323c398eff382d1dcc7c0dbc8b74630e40/jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001", size = 48741, upload-time = "2024-04-09T17:59:44.918Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/e0/7bd7cff65594fd9936e2f9385701e44574fc7d721331ff676ce440b14100/jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da", size = 69146, upload-time = "2024-04-09T17:59:43.388Z" }, +] + +[[package]] +name = "jupyter-server" +version = "2.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "argon2-cffi" }, + { name = "jinja2" }, + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "jupyter-events" }, + { name = "jupyter-server-terminals" }, + { name = "nbconvert" }, + { name = "nbformat" }, + { name = "overrides" }, + { name = "packaging" }, + { name = "prometheus-client" }, + { name = "pywinpty", marker = "os_name == 'nt'" }, + { name = "pyzmq" }, + { name = "send2trash" }, + { name = "terminado" }, + { name = "tornado" }, + { name = "traitlets" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/c8/ba2bbcd758c47f1124c4ca14061e8ce60d9c6fd537faee9534a95f83521a/jupyter_server-2.16.0.tar.gz", hash = 
"sha256:65d4b44fdf2dcbbdfe0aa1ace4a842d4aaf746a2b7b168134d5aaed35621b7f6", size = 728177, upload-time = "2025-05-12T16:44:46.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/1f/5ebbced977171d09a7b0c08a285ff9a20aafb9c51bde07e52349ff1ddd71/jupyter_server-2.16.0-py3-none-any.whl", hash = "sha256:3d8db5be3bc64403b1c65b400a1d7f4647a5ce743f3b20dbdefe8ddb7b55af9e", size = 386904, upload-time = "2025-05-12T16:44:43.335Z" }, +] + +[[package]] +name = "jupyter-server-terminals" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywinpty", marker = "os_name == 'nt'" }, + { name = "terminado" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/d5/562469734f476159e99a55426d697cbf8e7eb5efe89fb0e0b4f83a3d3459/jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269", size = 31430, upload-time = "2024-03-12T14:37:03.049Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/2d/2b32cdbe8d2a602f697a649798554e4f072115438e92249624e532e8aca6/jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa", size = 13656, upload-time = "2024-03-12T14:37:00.708Z" }, +] + +[[package]] +name = "jupyterlab" +version = "4.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-lru" }, + { name = "httpx" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "ipykernel" }, + { name = "jinja2" }, + { name = "jupyter-core" }, + { name = "jupyter-lsp" }, + { name = "jupyter-server" }, + { name = "jupyterlab-server" }, + { name = "notebook-shim" }, + { name = "packaging" }, + { name = "setuptools" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/2d/d1678dcf2db66cb4a38a80d9e5fcf48c349f3ac12f2d38882993353ae768/jupyterlab-4.4.3.tar.gz", hash = "sha256:a94c32fd7f8b93e82a49dc70a6ec45a5c18281ca2a7228d12765e4e210e5bca2", size = 23032376, upload-time = "2025-05-26T11:18:00.996Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/4d/7dd5c2ffbb960930452a031dc8410746183c924580f2ab4e68ceb5b3043f/jupyterlab-4.4.3-py3-none-any.whl", hash = "sha256:164302f6d4b6c44773dfc38d585665a4db401a16e5296c37df5cba63904fbdea", size = 12295480, upload-time = "2025-05-26T11:17:56.607Z" }, +] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" }, +] + +[[package]] +name = "jupyterlab-server" +version = "2.27.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "babel" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2" }, + { name = "json5" }, + { name = "jsonschema" }, + 
{ name = "jupyter-server" }, + { name = "packaging" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/c9/a883ce65eb27905ce77ace410d83587c82ea64dc85a48d1f7ed52bcfa68d/jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4", size = 76173, upload-time = "2024-07-16T17:02:04.149Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/09/2032e7d15c544a0e3cd831c51d77a8ca57f7555b2e1b2922142eddb02a84/jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4", size = 59700, upload-time = "2024-07-16T17:02:01.115Z" }, +] + +[[package]] +name = "jupyterlab-widgets" +version = "3.0.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/7d/160595ca88ee87ac6ba95d82177d29ec60aaa63821d3077babb22ce031a5/jupyterlab_widgets-3.0.15.tar.gz", hash = "sha256:2920888a0c2922351a9202817957a68c07d99673504d6cd37345299e971bb08b", size = 213149, upload-time = "2025-05-05T12:32:31.004Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/6a/ca128561b22b60bd5a0c4ea26649e68c8556b82bc70a0c396eebc977fe86/jupyterlab_widgets-3.0.15-py3-none-any.whl", hash = "sha256:d59023d7d7ef71400d51e6fee9a88867f6e65e10a4201605d2d7f3e8f012a31c", size = 216571, upload-time = "2025-05-05T12:32:29.534Z" }, +] + +[[package]] +name = "lark" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/60/bc7622aefb2aee1c0b4ba23c1446d3e30225c8770b38d7aedbfb65ca9d5a/lark-1.2.2.tar.gz", hash = "sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80", size = 252132, upload-time = "2024-08-13T19:49:00.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/00/d90b10b962b4277f5e64a78b6609968859ff86889f5b898c1a778c06ec00/lark-1.2.2-py3-none-any.whl", hash = "sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c", size = 111036, upload-time = "2024-08-13T19:48:58.603Z" }, +] + +[[package]] +name = "libcst" +source = { editable = "." 
} +dependencies = [ + { name = "pyyaml", marker = "python_full_version < '3.13'" }, + { name = "pyyaml-ft", marker = "python_full_version >= '3.13'" }, +] + +[package.dev-dependencies] +dev = [ + { name = "black" }, + { name = "build" }, + { name = "coverage", extra = ["toml"] }, + { name = "fixit" }, + { name = "flake8" }, + { name = "hypothesis" }, + { name = "hypothesmith" }, + { name = "maturin" }, + { name = "poethepoet" }, + { name = "prompt-toolkit" }, + { name = "pyre-check", marker = "sys_platform != 'win32'" }, + { name = "setuptools-rust" }, + { name = "setuptools-scm" }, + { name = "slotscheck" }, + { name = "ufmt" }, + { name = "usort" }, +] +docs = [ + { name = "black" }, + { name = "build" }, + { name = "coverage", extra = ["toml"] }, + { name = "fixit" }, + { name = "flake8" }, + { name = "hypothesis" }, + { name = "hypothesmith" }, + { name = "jinja2" }, + { name = "jupyter" }, + { name = "maturin" }, + { name = "nbsphinx" }, + { name = "poethepoet" }, + { name = "prompt-toolkit" }, + { name = "pyre-check", marker = "sys_platform != 'win32'" }, + { name = "setuptools-rust" }, + { name = "setuptools-scm" }, + { name = "slotscheck" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinx-rtd-theme" }, + { name = "ufmt" }, + { name = "usort" }, +] + +[package.metadata] +requires-dist = [ + { name = "pyyaml", marker = "python_full_version < '3.13'", specifier = ">=5.2" }, + { name = "pyyaml-ft", marker = "python_full_version >= '3.13'", specifier = ">=8.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "black", specifier = "==25.1.0" }, + { name = "build", specifier = ">=0.10.0" }, + { name = "coverage", extras = ["toml"], specifier = ">=4.5.4" }, + { name = "fixit", specifier = "==2.1.0" }, + { name = "flake8", specifier = "==7.2.0" }, + { name = "hypothesis", specifier = ">=4.36.0" }, + { name = "hypothesmith", specifier = ">=0.0.4" }, + { name = "maturin", specifier = ">=1.7.0,<1.8" }, + { name = "poethepoet", specifier = ">=0.35.0" }, + { name = "prompt-toolkit", specifier = ">=2.0.9" }, + { name = "pyre-check", marker = "sys_platform != 'win32'", specifier = "==0.9.18" }, + { name = "setuptools-rust", specifier = ">=1.5.2" }, + { name = "setuptools-scm", specifier = ">=6.0.1" }, + { name = "slotscheck", specifier = ">=0.7.1" }, + { name = "ufmt", specifier = "==2.8.0" }, + { name = "usort", specifier = "==1.0.8.post1" }, +] +docs = [ + { name = "black", specifier = "==25.1.0" }, + { name = "build", specifier = ">=0.10.0" }, + { name = "coverage", extras = ["toml"], specifier = ">=4.5.4" }, + { name = "fixit", specifier = "==2.1.0" }, + { name = "flake8", specifier = "==7.2.0" }, + { name = "hypothesis", specifier = ">=4.36.0" }, + { name = "hypothesmith", specifier = ">=0.0.4" }, + { name = "jinja2", specifier = "==3.1.6" }, + { name = "jupyter", specifier = ">=1.0.0" }, + { name = "maturin", specifier = ">=1.7.0,<1.8" }, + { name = "nbsphinx", specifier = ">=0.4.2" }, + { name = "poethepoet", specifier = ">=0.35.0" }, + { name = "prompt-toolkit", specifier = ">=2.0.9" }, + { name = "pyre-check", marker = "sys_platform != 'win32'", specifier = "==0.9.18" }, + { name = "setuptools-rust", specifier = ">=1.5.2" }, + { name = "setuptools-scm", specifier = ">=6.0.1" }, + { name = "slotscheck", specifier = ">=0.7.1" }, + { name = 
"sphinx", specifier = ">=5.1.1" }, + { name = "sphinx-rtd-theme", specifier = ">=0.4.3" }, + { name = "ufmt", specifier = "==2.8.0" }, + { name = "usort", specifier = "==1.0.8.post1" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, +] + +[[package]] +name = "marshmallow" +version = "3.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = "2025-02-03T15:32:25.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" }, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" }, +] + +[[package]] +name = "maturin" +version = "1.7.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/1e/085ddc0e5b08ae7af7a743a0dd6ed06b22a1332288488f1a333137885150/maturin-1.7.8.tar.gz", hash = "sha256:649c6ef3f0fa4c5f596140d761dc5a4d577c485cc32fb5b9b344a8280352880d", size = 195704, upload-time = "2024-12-04T11:38:23.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/ed/c8bb26e91c879e418ae1b01630722ed20b6fe0e6755be8d538d83666f136/maturin-1.7.8-py3-none-linux_armv6l.whl", hash = "sha256:c6950fd2790acd93265e1501cea66f9249cff19724654424ca75a3b17ebb315b", size = 7515691, upload-time = "2024-12-04T11:37:55.443Z" }, + { url = "https://files.pythonhosted.org/packages/38/7a/573f969315f0b92a09a0a565d45e98812c87796e2e19a7856159ab234faf/maturin-1.7.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f98288d5c382bacf0c076871dfd50c38f1eb2248f417551e98dd6f47f6ee8afa", size = 14434454, upload-time = "2024-12-04T11:37:58.448Z" }, + { url = "https://files.pythonhosted.org/packages/a6/17/46834841fbf19231487f185e68b95ca348cc05cce49be8787e0bc7e9dc47/maturin-1.7.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b2d4e0f674ca29864e6b86c2eb9fee8236d1c7496c25f7300e34229272468f4c", size = 7509122, upload-time = "2024-12-04T11:38:01.355Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8f/bf8b4871eb390a4baef2e0bb5016852c7c0311a9772e2945534cfa2ee40e/maturin-1.7.8-py3-none-manylinux_2_12_i686.manylinux2010_i686.musllinux_1_1_i686.whl", hash = "sha256:6cafb17bf57822bdc04423d9e3e766d42918d474848fe9833e397267514ba891", size = 7598870, upload-time = "2024-12-04T11:38:03.708Z" }, + { url = "https://files.pythonhosted.org/packages/dc/43/c842be67a7c59568082345249b956138ae93d0b2474fb41c186ce26d05e1/maturin-1.7.8-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:2b2bdee0c3a84696b3a809054c43ead1a04b7b3321cbd5b8f5676e4ba4691d0f", size = 7932310, upload-time = "2024-12-04T11:38:05.463Z" }, + { url = "https://files.pythonhosted.org/packages/12/12/42435d05f2d6c75eb621751e6f021d29eb34d18e3b9c5c94d828744c2d54/maturin-1.7.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:b8188b71259fc2bc568d9c8acc186fcfed96f42539bcb55b8e6f4ec26e411f37", size = 7321964, upload-time = "2024-12-04T11:38:07.143Z" }, + { url = "https://files.pythonhosted.org/packages/b4/26/f3272ee985ebf9b3e8c4cd4f4efb022af1e12c9f53aed0dcc9a255399f4e/maturin-1.7.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:a4f58c2a53c2958a1bf090960b08b28e676136cd88ac2f5dfdcf1b14ea54ec06", size = 7408613, upload-time = "2024-12-04T11:38:09.814Z" }, + { url = "https://files.pythonhosted.org/packages/36/7d/be27bcc7d3ac6e6c2136a8ec0cc56f227a292d6cfdde55e095b6c0aa24a9/maturin-1.7.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:c5d6c0c631d1fc646cd3834795e6cfd72ab4271d289df7e0f911261a02bec75f", size = 9496974, upload-time = "2024-12-04T11:38:11.618Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/e8/0d7323e9a31c11edf69c4473d73eca74803ce3e2390abf8ae3ac7eb10b04/maturin-1.7.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c23664d19dadcbf800ef70f26afb2e0485a985c62889930934f019c565534c23", size = 10828401, upload-time = "2024-12-04T11:38:14.42Z" }, + { url = "https://files.pythonhosted.org/packages/7e/82/5080e052c0d8c9872f6d4b94cae84c17ed7f2ea270d709210ea6445b655f/maturin-1.7.8-py3-none-win32.whl", hash = "sha256:403eebf1afa6f19b49425f089e39c53b8e597bc86a47f3a76e828dc78d27fa80", size = 6845240, upload-time = "2024-12-04T11:38:17.162Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c9/9b162361ded893f36038c2f8ac6a972ec441c11df8d17c440997eb28090f/maturin-1.7.8-py3-none-win_amd64.whl", hash = "sha256:1ce48d007438b895f8665314b6748ac0dab31e4f32049a60b52281dd2dccbdde", size = 7762332, upload-time = "2024-12-04T11:38:19.445Z" }, + { url = "https://files.pythonhosted.org/packages/fa/40/46d4742db742f69a7fe0054cd7c82bc79b2d70cb8c91f7e737e75c28a5f3/maturin-1.7.8-py3-none-win_arm64.whl", hash = "sha256:cc92a62953205e8945b6cfe6943d6a8576a4442d30d9c67141f944f4f4640e62", size = 6501353, upload-time = "2024-12-04T11:38:21.713Z" }, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, +] + +[[package]] +name = "mistune" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/79/bda47f7dd7c3c55770478d6d02c9960c430b0cf1773b72366ff89126ea31/mistune-3.1.3.tar.gz", hash = "sha256:a7035c21782b2becb6be62f8f25d3df81ccb4d6fa477a6525b15af06539f02a0", size = 94347, upload-time = "2025-03-19T14:27:24.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/4d/23c4e4f09da849e127e9f123241946c23c1e30f45a88366879e064211815/mistune-3.1.3-py3-none-any.whl", hash = "sha256:1a32314113cff28aa6432e99e522677c8587fd83e3d51c29b82a52409c842bd9", size = 53410, upload-time = "2025-03-19T14:27:23.451Z" }, +] + +[[package]] +name = "moreorless" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8d/85/2e4999ac4a21ab3c5f31e2a48e0989a80be3afc512a7983e3253615983d4/moreorless-0.5.0.tar.gz", hash = "sha256:560a04f85006fccd74feaa4b6213a446392ff7b5ec0194a5464b6c30f182fa33", size = 14093, upload-time = "2025-05-04T22:29:59.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/2e/9ea80ca55b73530b7639c6f146a58f636ddfe5a852ad467a44fe3e80d809/moreorless-0.5.0-py3-none-any.whl", 
hash = "sha256:66228870cd2f14bad5c3c3780aa71e29d3b2d9b5a01c03bfbf105efd4f668ecf", size = 14380, upload-time = "2025-05-04T22:29:57.417Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nbclient" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "nbformat" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" }, +] + +[[package]] +name = "nbconvert" +version = "7.16.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "bleach", extra = ["css"] }, + { name = "defusedxml" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2" }, + { name = "jupyter-core" }, + { name = "jupyterlab-pygments" }, + { name = "markupsafe" }, + { name = "mistune" }, + { name = "nbclient" }, + { name = "nbformat" }, + { name = "packaging" }, + { name = "pandocfilters" }, + { name = "pygments" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, +] + +[[package]] +name = "nbformat" +version = "5.10.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastjsonschema" }, + { name = "jsonschema" }, + { name = "jupyter-core" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, +] + +[[package]] +name = "nbsphinx" +version = "0.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name = "nbconvert" }, + { name = "nbformat" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/84/b1856b7651ac34e965aa567a158714c7f3bd42a1b1ce76bf423ffb99872c/nbsphinx-0.9.7.tar.gz", hash = "sha256:abd298a686d55fa894ef697c51d44f24e53aa312dadae38e82920f250a5456fe", size = 180479, upload-time = "2025-03-03T19:46:08.069Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/2d/8c8e635bcc6757573d311bb3c5445426382f280da32b8cd6d82d501ef4a4/nbsphinx-0.9.7-py3-none-any.whl", hash = "sha256:7292c3767fea29e405c60743eee5393682a83982ab202ff98f5eb2db02629da8", size = 31660, upload-time = "2025-03-03T19:46:06.581Z" }, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, +] + +[[package]] +name = "notebook" +version = "7.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-server" }, + { name = "jupyterlab" }, + { name = "jupyterlab-server" }, + { name = "notebook-shim" }, + { name = "tornado" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/21/4f83b15e483da4f4f63928edd0cb08b6e7d33f8a15c23b116a90c44c6235/notebook-7.4.3.tar.gz", hash = "sha256:a1567481cd3853f2610ee0ecf5dfa12bb508e878ee8f92152c134ef7f0568a76", size = 13881668, upload-time = "2025-05-26T14:27:21.656Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/1b/16c809d799e3ddd7a97c8b43734f79624b74ddef9707e7d92275a13777bc/notebook-7.4.3-py3-none-any.whl", hash = "sha256:9cdeee954e04101cadb195d90e2ab62b7c9286c1d4f858bf3bb54e40df16c0c3", size = 14286402, upload-time = "2025-05-26T14:27:17.339Z" }, +] + +[[package]] +name = "notebook-shim" +version = "0.2.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-server" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167, upload-time = "2024-02-14T23:35:18.353Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307, upload-time = "2024-02-14T23:35:16.286Z" }, +] + +[[package]] +name = "overrides" +version = "7.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pandocfilters" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" }, +] + +[[package]] +name = "parso" +version = "0.8.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609, upload-time = "2024-04-05T09:43:55.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650, upload-time = "2024-04-05T09:43:53.299Z" }, +] + +[[package]] +name = "pastel" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555, upload-time = "2020-09-16T19:21:12.43Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "poethepoet" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pastel" }, + { name = "pyyaml" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/b1/d4f4361b278fae10f6074675385ce3acf53c647f8e6eeba22c652f8ba985/poethepoet-0.35.0.tar.gz", hash = "sha256:b396ae862d7626e680bbd0985b423acf71634ce93a32d8b5f38340f44f5fbc3e", size = 66006, upload-time = "2025-06-09T12:58:18.849Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/08/abc2d7e2400dd8906e3208f9b88ac610f097d7ee0c7a1fa4a157b49a9e86/poethepoet-0.35.0-py3-none-any.whl", hash = "sha256:bed5ae1fd63f179dfa67aabb93fa253d79695c69667c927d8b24ff378799ea75", size = 87164, upload-time = "2025-06-09T12:58:17.084Z" }, +] + +[[package]] +name = "prometheus-client" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/cf/40dde0a2be27cc1eb41e333d1a674a74ce8b8b0457269cc640fd42b07cf7/prometheus_client-0.22.1.tar.gz", 
hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", size = 69746, upload-time = "2025-06-02T14:29:01.152Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/ae/ec06af4fe3ee72d16973474f122541746196aaa16cea6f66d18b963c6177/prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094", size = 58694, upload-time = "2025-06-02T14:29:00.068Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.51" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, +] + +[[package]] +name = "psutil" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 
241053, upload-time = "2025-02-13T21:54:34.31Z" }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, +] + +[[package]] +name = "pycodestyle" +version = "2.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/6e/1f4a62078e4d95d82367f24e685aef3a672abfd27d1a868068fed4ed2254/pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae", size = 39312, upload-time = "2025-03-29T17:33:30.669Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/be/b00116df1bfb3e0bb5b45e29d604799f7b91dd861637e4d448b4e09e6a3e/pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9", size = 31424, upload-time = "2025-03-29T17:33:29.405Z" }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, +] + +[[package]] +name = "pyflakes" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/cc/1df338bd7ed1fa7c317081dcf29bf2f01266603b301e6858856d346a12b3/pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b", size = 64175, upload-time = 
"2025-03-31T13:21:20.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/40/b293a4fa769f3b02ab9e387c707c4cbdc34f073f945de0386107d4e669e6/pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a", size = 63164, upload-time = "2025-03-31T13:21:18.503Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, +] + +[[package]] +name = "pyproject-hooks" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228, upload-time = "2024-09-29T09:24:13.293Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, +] + +[[package]] +name = "pyre-check" +version = "0.9.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "dataclasses-json" }, + { name = "intervaltree" }, + { name = "libcst" }, + { name = "psutil" }, + { name = "pyre-extensions" }, + { name = "tabulate" }, + { name = "testslide" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/02/a92e10ecddce435f794493e18e1c0add477e3c307023525a49cffa299163/pyre-check-0.9.18.tar.gz", hash = "sha256:d5eb6db9011a7207189ecd0eaf32951e46cb0769c0f96a78fd0b90e633c9df2c", size = 18030825, upload-time = "2023-02-14T00:59:29.593Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/d9/5207ccd2eff3870b44f11c2db6b6d8e31cbcaca973a1b5ba4ac6d2460e41/pyre_check-0.9.18-py3-none-macosx_10_11_x86_64.whl", hash = "sha256:22633f5af3b986d266451a9e386a32414f8868de0a94226c7766f81eb080c59d", size = 19378418, upload-time = "2023-02-14T00:59:24.891Z" }, + { url = "https://files.pythonhosted.org/packages/33/07/865a1ca2a57fc2e9a0f78e005938a465b8a2ff748538fb5a0c1c19cb661f/pyre_check-0.9.18-py3-none-manylinux1_x86_64.whl", hash = "sha256:5659d4dbd6d1dd3052359861d828419f07d1ced1dad4ce4ca79071d252699c26", size = 23486523, upload-time = "2023-02-14T00:59:21.022Z" }, +] + +[[package]] +name = "pyre-extensions" +version = "0.0.32" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, + { name = "typing-inspect" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/a7/53/5bc2532536e921c48366ad1047c1344ccef6afa5e84053f0f6e20a453767/pyre_extensions-0.0.32.tar.gz", hash = "sha256:5396715f14ea56c4d5fd0a88c57ca7e44faa468f905909edd7de4ad90ed85e55", size = 10852, upload-time = "2024-11-22T19:26:44.152Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/7a/9812cb8be9828ab688203c5ac5f743c60652887f0c00995a6f6f19f912bd/pyre_extensions-0.0.32-py3-none-any.whl", hash = "sha256:a63ba6883ab02f4b1a9f372ed4eb4a2f4c6f3d74879aa2725186fdfcfe3e5c68", size = 12766, upload-time = "2024-11-22T19:26:42.465Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-json-logger" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/de/d3144a0bceede957f961e975f3752760fbe390d57fbe194baf709d8f1f7b/python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84", size = 16642, upload-time = "2025-03-07T07:08:27.301Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/20/0f2523b9e50a8052bc6a8b732dfc8568abbdc42010aef03a2d750bdab3b2/python_json_logger-3.3.0-py3-none-any.whl", hash = "sha256:dd980fae8cffb24c13caf6e158d3d61c0d6d22342f932cb6e9deedab3d35eec7", size = 15163, upload-time = "2025-03-07T07:08:25.627Z" }, +] + +[[package]] +name = "pywin32" +version = "310" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/da/a5f38fffbba2fb99aa4aa905480ac4b8e83ca486659ac8c95bce47fb5276/pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1", size = 8848240, upload-time = "2025-03-17T00:55:46.783Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fe/d873a773324fa565619ba555a82c9dabd677301720f3660a731a5d07e49a/pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d", size = 9601854, upload-time = "2025-03-17T00:55:48.783Z" }, + { url = "https://files.pythonhosted.org/packages/3c/84/1a8e3d7a15490d28a5d816efa229ecb4999cdc51a7c30dd8914f669093b8/pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213", size = 8522963, upload-time = "2025-03-17T00:55:50.969Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b1/68aa2986129fb1011dabbe95f0136f44509afaf072b12b8f815905a39f33/pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd", size = 8784284, upload-time = "2025-03-17T00:55:53.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/bd/d1592635992dd8db5bb8ace0551bc3a769de1ac8850200cfa517e72739fb/pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c", size = 9520748, upload-time = "2025-03-17T00:55:55.203Z" }, + { url = "https://files.pythonhosted.org/packages/90/b1/ac8b1ffce6603849eb45a91cf126c0fa5431f186c2e768bf56889c46f51c/pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582", size = 8455941, upload-time = "2025-03-17T00:55:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239, upload-time = "2025-03-17T00:55:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839, upload-time = "2025-03-17T00:56:00.8Z" }, + { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470, upload-time = "2025-03-17T00:56:02.601Z" }, + { url = "https://files.pythonhosted.org/packages/1c/09/9c1b978ffc4ae53999e89c19c77ba882d9fce476729f23ef55211ea1c034/pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab", size = 8794384, upload-time = "2025-03-17T00:56:04.383Z" }, + { url = "https://files.pythonhosted.org/packages/45/3c/b4640f740ffebadd5d34df35fecba0e1cfef8fde9f3e594df91c28ad9b50/pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e", size = 9503039, upload-time = "2025-03-17T00:56:06.207Z" }, + { url = "https://files.pythonhosted.org/packages/b4/f4/f785020090fb050e7fb6d34b780f2231f302609dc964672f72bfaeb59a28/pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33", size = 8458152, upload-time = "2025-03-17T00:56:07.819Z" }, + { url = "https://files.pythonhosted.org/packages/a2/cd/d09d434630edb6a0c44ad5079611279a67530296cfe0451e003de7f449ff/pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a", size = 8848099, upload-time = "2025-03-17T00:55:42.415Z" }, + { url = "https://files.pythonhosted.org/packages/93/ff/2a8c10315ffbdee7b3883ac0d1667e267ca8b3f6f640d81d43b87a82c0c7/pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475", size = 9602031, upload-time = "2025-03-17T00:55:44.512Z" }, +] + +[[package]] +name = "pywinpty" +version = "2.0.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/7c/917f9c4681bb8d34bfbe0b79d36bbcd902651aeab48790df3d30ba0202fb/pywinpty-2.0.15.tar.gz", hash = "sha256:312cf39153a8736c617d45ce8b6ad6cd2107de121df91c455b10ce6bba7a39b2", size = 29017, upload-time = "2025-02-03T21:53:23.265Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/b7/855db919ae526d2628f3f2e6c281c4cdff7a9a8af51bb84659a9f07b1861/pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = 
"sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e", size = 1405161, upload-time = "2025-02-03T21:56:25.008Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ac/6884dcb7108af66ad53f73ef4dad096e768c9203a6e6ce5e6b0c4a46e238/pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca", size = 1405249, upload-time = "2025-02-03T21:55:47.114Z" }, + { url = "https://files.pythonhosted.org/packages/88/e5/9714def18c3a411809771a3fbcec70bffa764b9675afb00048a620fca604/pywinpty-2.0.15-cp312-cp312-win_amd64.whl", hash = "sha256:83a8f20b430bbc5d8957249f875341a60219a4e971580f2ba694fbfb54a45ebc", size = 1405243, upload-time = "2025-02-03T21:56:52.476Z" }, + { url = "https://files.pythonhosted.org/packages/fb/16/2ab7b3b7f55f3c6929e5f629e1a68362981e4e5fed592a2ed1cb4b4914a5/pywinpty-2.0.15-cp313-cp313-win_amd64.whl", hash = "sha256:ab5920877dd632c124b4ed17bc6dd6ef3b9f86cd492b963ffdb1a67b85b0f408", size = 1405020, upload-time = "2025-02-03T21:56:04.753Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/edef3515dd2030db2795dbfbe392232c7a0f3dc41b98e92b38b42ba497c7/pywinpty-2.0.15-cp313-cp313t-win_amd64.whl", hash = "sha256:a4560ad8c01e537708d2790dbe7da7d986791de805d89dd0d3697ca59e9e4901", size = 1404151, upload-time = "2025-02-03T21:55:53.628Z" }, + { url = "https://files.pythonhosted.org/packages/47/96/90fa02f19b1eff7469ad7bf0ef8efca248025de9f1d0a0b25682d2aacf68/pywinpty-2.0.15-cp39-cp39-win_amd64.whl", hash = "sha256:d261cd88fcd358cfb48a7ca0700db3e1c088c9c10403c9ebc0d8a8b57aa6a117", size = 1405302, upload-time = "2025-02-03T21:55:40.394Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, 
upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, + { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, + { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, +] + +[[package]] +name = "pyyaml-ft" +version = "8.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/eb/5a0d575de784f9a1f94e2b1288c6886f13f34185e13117ed530f32b6f8a8/pyyaml_ft-8.0.0.tar.gz", hash = "sha256:0c947dce03954c7b5d38869ed4878b2e6ff1d44b08a0d84dc83fdad205ae39ab", size = 141057, upload-time = "2025-06-10T15:32:15.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/ba/a067369fe61a2e57fb38732562927d5bae088c73cb9bb5438736a9555b29/pyyaml_ft-8.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c1306282bc958bfda31237f900eb52c9bedf9b93a11f82e1aab004c9a5657a6", size = 187027, upload-time = "2025-06-10T15:31:48.722Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c5/a3d2020ce5ccfc6aede0d45bcb870298652ac0cf199f67714d250e0cdf39/pyyaml_ft-8.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30c5f1751625786c19de751e3130fc345ebcba6a86f6bddd6e1285342f4bbb69", size = 176146, upload-time = "2025-06-10T15:31:50.584Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bb/23a9739291086ca0d3189eac7cd92b4d00e9fdc77d722ab610c35f9a82ba/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fa992481155ddda2e303fcc74c79c05eddcdbc907b888d3d9ce3ff3e2adcfb0", size = 746792, upload-time = 
"2025-06-10T15:31:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c2/e8825f4ff725b7e560d62a3609e31d735318068e1079539ebfde397ea03e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cec6c92b4207004b62dfad1f0be321c9f04725e0f271c16247d8b39c3bf3ea42", size = 786772, upload-time = "2025-06-10T15:31:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/35/be/58a4dcae8854f2fdca9b28d9495298fd5571a50d8430b1c3033ec95d2d0e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06237267dbcab70d4c0e9436d8f719f04a51123f0ca2694c00dd4b68c338e40b", size = 778723, upload-time = "2025-06-10T15:31:56.093Z" }, + { url = "https://files.pythonhosted.org/packages/86/ed/fed0da92b5d5d7340a082e3802d84c6dc9d5fa142954404c41a544c1cb92/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8a7f332bc565817644cdb38ffe4739e44c3e18c55793f75dddb87630f03fc254", size = 758478, upload-time = "2025-06-10T15:31:58.314Z" }, + { url = "https://files.pythonhosted.org/packages/f0/69/ac02afe286275980ecb2dcdc0156617389b7e0c0a3fcdedf155c67be2b80/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d10175a746be65f6feb86224df5d6bc5c049ebf52b89a88cf1cd78af5a367a8", size = 799159, upload-time = "2025-06-10T15:31:59.675Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ac/c492a9da2e39abdff4c3094ec54acac9747743f36428281fb186a03fab76/pyyaml_ft-8.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:58e1015098cf8d8aec82f360789c16283b88ca670fe4275ef6c48c5e30b22a96", size = 158779, upload-time = "2025-06-10T15:32:01.029Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9b/41998df3298960d7c67653669f37710fa2d568a5fc933ea24a6df60acaf6/pyyaml_ft-8.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5f3e2ceb790d50602b2fd4ec37abbd760a8c778e46354df647e7c5a4ebb", size = 191331, upload-time = "2025-06-10T15:32:02.602Z" }, + { url = "https://files.pythonhosted.org/packages/0f/16/2710c252ee04cbd74d9562ebba709e5a284faeb8ada88fcda548c9191b47/pyyaml_ft-8.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d445bf6ea16bb93c37b42fdacfb2f94c8e92a79ba9e12768c96ecde867046d1", size = 182879, upload-time = "2025-06-10T15:32:04.466Z" }, + { url = "https://files.pythonhosted.org/packages/9a/40/ae8163519d937fa7bfa457b6f78439cc6831a7c2b170e4f612f7eda71815/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c56bb46b4fda34cbb92a9446a841da3982cdde6ea13de3fbd80db7eeeab8b49", size = 811277, upload-time = "2025-06-10T15:32:06.214Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/28d82dbff7f87b96f0eeac79b7d972a96b4980c1e445eb6a857ba91eda00/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab0abb46eb1780da486f022dce034b952c8ae40753627b27a626d803926483b", size = 831650, upload-time = "2025-06-10T15:32:08.076Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/161c4566facac7d75a9e182295c223060373d4116dead9cc53a265de60b9/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd48d639cab5ca50ad957b6dd632c7dd3ac02a1abe0e8196a3c24a52f5db3f7a", size = 815755, upload-time = "2025-06-10T15:32:09.435Z" }, + { url = "https://files.pythonhosted.org/packages/05/10/f42c48fa5153204f42eaa945e8d1fd7c10d6296841dcb2447bf7da1be5c4/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:052561b89d5b2a8e1289f326d060e794c21fa068aa11255fe71d65baf18a632e", size = 810403, upload-time = 
"2025-06-10T15:32:11.051Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d2/e369064aa51009eb9245399fd8ad2c562bd0bcd392a00be44b2a824ded7c/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3bb4b927929b0cb162fb1605392a321e3333e48ce616cdcfa04a839271373255", size = 835581, upload-time = "2025-06-10T15:32:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/c0/28/26534bed77109632a956977f60d8519049f545abc39215d086e33a61f1f2/pyyaml_ft-8.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:de04cfe9439565e32f178106c51dd6ca61afaa2907d143835d501d84703d3793", size = 171579, upload-time = "2025-06-10T15:32:14.34Z" }, +] + +[[package]] +name = "pyzmq" +version = "26.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/11/b9213d25230ac18a71b39b3723494e57adebe36e066397b961657b3b41c1/pyzmq-26.4.0.tar.gz", hash = "sha256:4bd13f85f80962f91a651a7356fe0472791a5f7a92f227822b5acf44795c626d", size = 278293, upload-time = "2025-04-04T12:05:44.049Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/b8/af1d814ffc3ff9730f9a970cbf216b6f078e5d251a25ef5201d7bc32a37c/pyzmq-26.4.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:0329bdf83e170ac133f44a233fc651f6ed66ef8e66693b5af7d54f45d1ef5918", size = 1339238, upload-time = "2025-04-04T12:03:07.022Z" }, + { url = "https://files.pythonhosted.org/packages/ee/e4/5aafed4886c264f2ea6064601ad39c5fc4e9b6539c6ebe598a859832eeee/pyzmq-26.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:398a825d2dea96227cf6460ce0a174cf7657d6f6827807d4d1ae9d0f9ae64315", size = 672848, upload-time = "2025-04-04T12:03:08.591Z" }, + { url = "https://files.pythonhosted.org/packages/79/39/026bf49c721cb42f1ef3ae0ee3d348212a7621d2adb739ba97599b6e4d50/pyzmq-26.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d52d62edc96787f5c1dfa6c6ccff9b581cfae5a70d94ec4c8da157656c73b5b", size = 911299, upload-time = "2025-04-04T12:03:10Z" }, + { url = "https://files.pythonhosted.org/packages/03/23/b41f936a9403b8f92325c823c0f264c6102a0687a99c820f1aaeb99c1def/pyzmq-26.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1410c3a3705db68d11eb2424d75894d41cff2f64d948ffe245dd97a9debfebf4", size = 867920, upload-time = "2025-04-04T12:03:11.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3e/2de5928cdadc2105e7c8f890cc5f404136b41ce5b6eae5902167f1d5641c/pyzmq-26.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7dacb06a9c83b007cc01e8e5277f94c95c453c5851aac5e83efe93e72226353f", size = 862514, upload-time = "2025-04-04T12:03:13.013Z" }, + { url = "https://files.pythonhosted.org/packages/ce/57/109569514dd32e05a61d4382bc88980c95bfd2f02e58fea47ec0ccd96de1/pyzmq-26.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6bab961c8c9b3a4dc94d26e9b2cdf84de9918931d01d6ff38c721a83ab3c0ef5", size = 1204494, upload-time = "2025-04-04T12:03:14.795Z" }, + { url = "https://files.pythonhosted.org/packages/aa/02/dc51068ff2ca70350d1151833643a598625feac7b632372d229ceb4de3e1/pyzmq-26.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a5c09413b924d96af2aa8b57e76b9b0058284d60e2fc3730ce0f979031d162a", size = 1514525, upload-time = "2025-04-04T12:03:16.246Z" }, + { url = "https://files.pythonhosted.org/packages/48/2a/a7d81873fff0645eb60afaec2b7c78a85a377af8f1d911aff045d8955bc7/pyzmq-26.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash 
= "sha256:7d489ac234d38e57f458fdbd12a996bfe990ac028feaf6f3c1e81ff766513d3b", size = 1414659, upload-time = "2025-04-04T12:03:17.652Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ea/813af9c42ae21845c1ccfe495bd29c067622a621e85d7cda6bc437de8101/pyzmq-26.4.0-cp310-cp310-win32.whl", hash = "sha256:dea1c8db78fb1b4b7dc9f8e213d0af3fc8ecd2c51a1d5a3ca1cde1bda034a980", size = 580348, upload-time = "2025-04-04T12:03:19.384Z" }, + { url = "https://files.pythonhosted.org/packages/20/68/318666a89a565252c81d3fed7f3b4c54bd80fd55c6095988dfa2cd04a62b/pyzmq-26.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:fa59e1f5a224b5e04dc6c101d7186058efa68288c2d714aa12d27603ae93318b", size = 643838, upload-time = "2025-04-04T12:03:20.795Z" }, + { url = "https://files.pythonhosted.org/packages/91/f8/fb1a15b5f4ecd3e588bfde40c17d32ed84b735195b5c7d1d7ce88301a16f/pyzmq-26.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:a651fe2f447672f4a815e22e74630b6b1ec3a1ab670c95e5e5e28dcd4e69bbb5", size = 559565, upload-time = "2025-04-04T12:03:22.676Z" }, + { url = "https://files.pythonhosted.org/packages/32/6d/234e3b0aa82fd0290b1896e9992f56bdddf1f97266110be54d0177a9d2d9/pyzmq-26.4.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:bfcf82644c9b45ddd7cd2a041f3ff8dce4a0904429b74d73a439e8cab1bd9e54", size = 1339723, upload-time = "2025-04-04T12:03:24.358Z" }, + { url = "https://files.pythonhosted.org/packages/4f/11/6d561efe29ad83f7149a7cd48e498e539ed09019c6cd7ecc73f4cc725028/pyzmq-26.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9bcae3979b2654d5289d3490742378b2f3ce804b0b5fd42036074e2bf35b030", size = 672645, upload-time = "2025-04-04T12:03:25.693Z" }, + { url = "https://files.pythonhosted.org/packages/19/fd/81bfe3e23f418644660bad1a90f0d22f0b3eebe33dd65a79385530bceb3d/pyzmq-26.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccdff8ac4246b6fb60dcf3982dfaeeff5dd04f36051fe0632748fc0aa0679c01", size = 910133, upload-time = "2025-04-04T12:03:27.625Z" }, + { url = "https://files.pythonhosted.org/packages/97/68/321b9c775595ea3df832a9516252b653fe32818db66fdc8fa31c9b9fce37/pyzmq-26.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4550af385b442dc2d55ab7717837812799d3674cb12f9a3aa897611839c18e9e", size = 867428, upload-time = "2025-04-04T12:03:29.004Z" }, + { url = "https://files.pythonhosted.org/packages/4e/6e/159cbf2055ef36aa2aa297e01b24523176e5b48ead283c23a94179fb2ba2/pyzmq-26.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f7ffe9db1187a253fca95191854b3fda24696f086e8789d1d449308a34b88", size = 862409, upload-time = "2025-04-04T12:03:31.032Z" }, + { url = "https://files.pythonhosted.org/packages/05/1c/45fb8db7be5a7d0cadea1070a9cbded5199a2d578de2208197e592f219bd/pyzmq-26.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3709c9ff7ba61589b7372923fd82b99a81932b592a5c7f1a24147c91da9a68d6", size = 1205007, upload-time = "2025-04-04T12:03:32.687Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fa/658c7f583af6498b463f2fa600f34e298e1b330886f82f1feba0dc2dd6c3/pyzmq-26.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f8f3c30fb2d26ae5ce36b59768ba60fb72507ea9efc72f8f69fa088450cff1df", size = 1514599, upload-time = "2025-04-04T12:03:34.084Z" }, + { url = "https://files.pythonhosted.org/packages/4d/d7/44d641522353ce0a2bbd150379cb5ec32f7120944e6bfba4846586945658/pyzmq-26.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:382a4a48c8080e273427fc692037e3f7d2851959ffe40864f2db32646eeb3cef", size = 1414546, 
upload-time = "2025-04-04T12:03:35.478Z" }, + { url = "https://files.pythonhosted.org/packages/72/76/c8ed7263218b3d1e9bce07b9058502024188bd52cc0b0a267a9513b431fc/pyzmq-26.4.0-cp311-cp311-win32.whl", hash = "sha256:d56aad0517d4c09e3b4f15adebba8f6372c5102c27742a5bdbfc74a7dceb8fca", size = 579247, upload-time = "2025-04-04T12:03:36.846Z" }, + { url = "https://files.pythonhosted.org/packages/c3/d0/2d9abfa2571a0b1a67c0ada79a8aa1ba1cce57992d80f771abcdf99bb32c/pyzmq-26.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:963977ac8baed7058c1e126014f3fe58b3773f45c78cce7af5c26c09b6823896", size = 644727, upload-time = "2025-04-04T12:03:38.578Z" }, + { url = "https://files.pythonhosted.org/packages/0d/d1/c8ad82393be6ccedfc3c9f3adb07f8f3976e3c4802640fe3f71441941e70/pyzmq-26.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0c8e8cadc81e44cc5088fcd53b9b3b4ce9344815f6c4a03aec653509296fae3", size = 559942, upload-time = "2025-04-04T12:03:40.143Z" }, + { url = "https://files.pythonhosted.org/packages/10/44/a778555ebfdf6c7fc00816aad12d185d10a74d975800341b1bc36bad1187/pyzmq-26.4.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5227cb8da4b6f68acfd48d20c588197fd67745c278827d5238c707daf579227b", size = 1341586, upload-time = "2025-04-04T12:03:41.954Z" }, + { url = "https://files.pythonhosted.org/packages/9c/4f/f3a58dc69ac757e5103be3bd41fb78721a5e17da7cc617ddb56d973a365c/pyzmq-26.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1c07a7fa7f7ba86554a2b1bef198c9fed570c08ee062fd2fd6a4dcacd45f905", size = 665880, upload-time = "2025-04-04T12:03:43.45Z" }, + { url = "https://files.pythonhosted.org/packages/fe/45/50230bcfb3ae5cb98bee683b6edeba1919f2565d7cc1851d3c38e2260795/pyzmq-26.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae775fa83f52f52de73183f7ef5395186f7105d5ed65b1ae65ba27cb1260de2b", size = 902216, upload-time = "2025-04-04T12:03:45.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/59/56bbdc5689be5e13727491ad2ba5efd7cd564365750514f9bc8f212eef82/pyzmq-26.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c760d0226ebd52f1e6b644a9e839b5db1e107a23f2fcd46ec0569a4fdd4e63", size = 859814, upload-time = "2025-04-04T12:03:47.188Z" }, + { url = "https://files.pythonhosted.org/packages/81/b1/57db58cfc8af592ce94f40649bd1804369c05b2190e4cbc0a2dad572baeb/pyzmq-26.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ef8c6ecc1d520debc147173eaa3765d53f06cd8dbe7bd377064cdbc53ab456f5", size = 855889, upload-time = "2025-04-04T12:03:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/e8/92/47542e629cbac8f221c230a6d0f38dd3d9cff9f6f589ed45fdf572ffd726/pyzmq-26.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3150ef4084e163dec29ae667b10d96aad309b668fac6810c9e8c27cf543d6e0b", size = 1197153, upload-time = "2025-04-04T12:03:50.591Z" }, + { url = "https://files.pythonhosted.org/packages/07/e5/b10a979d1d565d54410afc87499b16c96b4a181af46e7645ab4831b1088c/pyzmq-26.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4448c9e55bf8329fa1dcedd32f661bf611214fa70c8e02fee4347bc589d39a84", size = 1507352, upload-time = "2025-04-04T12:03:52.473Z" }, + { url = "https://files.pythonhosted.org/packages/ab/58/5a23db84507ab9c01c04b1232a7a763be66e992aa2e66498521bbbc72a71/pyzmq-26.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e07dde3647afb084d985310d067a3efa6efad0621ee10826f2cb2f9a31b89d2f", size = 1406834, upload-time = "2025-04-04T12:03:54Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/74/aaa837b331580c13b79ac39396601fb361454ee184ca85e8861914769b99/pyzmq-26.4.0-cp312-cp312-win32.whl", hash = "sha256:ba034a32ecf9af72adfa5ee383ad0fd4f4e38cdb62b13624278ef768fe5b5b44", size = 577992, upload-time = "2025-04-04T12:03:55.815Z" }, + { url = "https://files.pythonhosted.org/packages/30/0f/55f8c02c182856743b82dde46b2dc3e314edda7f1098c12a8227eeda0833/pyzmq-26.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:056a97aab4064f526ecb32f4343917a4022a5d9efb6b9df990ff72e1879e40be", size = 640466, upload-time = "2025-04-04T12:03:57.231Z" }, + { url = "https://files.pythonhosted.org/packages/e4/29/073779afc3ef6f830b8de95026ef20b2d1ec22d0324d767748d806e57379/pyzmq-26.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:2f23c750e485ce1eb639dbd576d27d168595908aa2d60b149e2d9e34c9df40e0", size = 556342, upload-time = "2025-04-04T12:03:59.218Z" }, + { url = "https://files.pythonhosted.org/packages/d7/20/fb2c92542488db70f833b92893769a569458311a76474bda89dc4264bd18/pyzmq-26.4.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:c43fac689880f5174d6fc864857d1247fe5cfa22b09ed058a344ca92bf5301e3", size = 1339484, upload-time = "2025-04-04T12:04:00.671Z" }, + { url = "https://files.pythonhosted.org/packages/58/29/2f06b9cabda3a6ea2c10f43e67ded3e47fc25c54822e2506dfb8325155d4/pyzmq-26.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:902aca7eba477657c5fb81c808318460328758e8367ecdd1964b6330c73cae43", size = 666106, upload-time = "2025-04-04T12:04:02.366Z" }, + { url = "https://files.pythonhosted.org/packages/77/e4/dcf62bd29e5e190bd21bfccaa4f3386e01bf40d948c239239c2f1e726729/pyzmq-26.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e48a830bfd152fe17fbdeaf99ac5271aa4122521bf0d275b6b24e52ef35eb6", size = 902056, upload-time = "2025-04-04T12:04:03.919Z" }, + { url = "https://files.pythonhosted.org/packages/1a/cf/b36b3d7aea236087d20189bec1a87eeb2b66009731d7055e5c65f845cdba/pyzmq-26.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31be2b6de98c824c06f5574331f805707c667dc8f60cb18580b7de078479891e", size = 860148, upload-time = "2025-04-04T12:04:05.581Z" }, + { url = "https://files.pythonhosted.org/packages/18/a6/f048826bc87528c208e90604c3bf573801e54bd91e390cbd2dfa860e82dc/pyzmq-26.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6332452034be001bbf3206ac59c0d2a7713de5f25bb38b06519fc6967b7cf771", size = 855983, upload-time = "2025-04-04T12:04:07.096Z" }, + { url = "https://files.pythonhosted.org/packages/0a/27/454d34ab6a1d9772a36add22f17f6b85baf7c16e14325fa29e7202ca8ee8/pyzmq-26.4.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:da8c0f5dd352136853e6a09b1b986ee5278dfddfebd30515e16eae425c872b30", size = 1197274, upload-time = "2025-04-04T12:04:08.523Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3d/7abfeab6b83ad38aa34cbd57c6fc29752c391e3954fd12848bd8d2ec0df6/pyzmq-26.4.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f4ccc1a0a2c9806dda2a2dd118a3b7b681e448f3bb354056cad44a65169f6d86", size = 1507120, upload-time = "2025-04-04T12:04:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/13/ff/bc8d21dbb9bc8705126e875438a1969c4f77e03fc8565d6901c7933a3d01/pyzmq-26.4.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c0b5fceadbab461578daf8d1dcc918ebe7ddd2952f748cf30c7cf2de5d51101", size = 1406738, upload-time = "2025-04-04T12:04:12.509Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/5d/d4cd85b24de71d84d81229e3bbb13392b2698432cf8fdcea5afda253d587/pyzmq-26.4.0-cp313-cp313-win32.whl", hash = "sha256:28e2b0ff5ba4b3dd11062d905682bad33385cfa3cc03e81abd7f0822263e6637", size = 577826, upload-time = "2025-04-04T12:04:14.289Z" }, + { url = "https://files.pythonhosted.org/packages/c6/6c/f289c1789d7bb6e5a3b3bef7b2a55089b8561d17132be7d960d3ff33b14e/pyzmq-26.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:23ecc9d241004c10e8b4f49d12ac064cd7000e1643343944a10df98e57bc544b", size = 640406, upload-time = "2025-04-04T12:04:15.757Z" }, + { url = "https://files.pythonhosted.org/packages/b3/99/676b8851cb955eb5236a0c1e9ec679ea5ede092bf8bf2c8a68d7e965cac3/pyzmq-26.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:1edb0385c7f025045d6e0f759d4d3afe43c17a3d898914ec6582e6f464203c08", size = 556216, upload-time = "2025-04-04T12:04:17.212Z" }, + { url = "https://files.pythonhosted.org/packages/65/c2/1fac340de9d7df71efc59d9c50fc7a635a77b103392d1842898dd023afcb/pyzmq-26.4.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:93a29e882b2ba1db86ba5dd5e88e18e0ac6b627026c5cfbec9983422011b82d4", size = 1333769, upload-time = "2025-04-04T12:04:18.665Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c7/6c03637e8d742c3b00bec4f5e4cd9d1c01b2f3694c6f140742e93ca637ed/pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45684f276f57110bb89e4300c00f1233ca631f08f5f42528a5c408a79efc4a", size = 658826, upload-time = "2025-04-04T12:04:20.405Z" }, + { url = "https://files.pythonhosted.org/packages/a5/97/a8dca65913c0f78e0545af2bb5078aebfc142ca7d91cdaffa1fbc73e5dbd/pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72073e75260cb301aad4258ad6150fa7f57c719b3f498cb91e31df16784d89b", size = 891650, upload-time = "2025-04-04T12:04:22.413Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7e/f63af1031eb060bf02d033732b910fe48548dcfdbe9c785e9f74a6cc6ae4/pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be37e24b13026cfedd233bcbbccd8c0bcd2fdd186216094d095f60076201538d", size = 849776, upload-time = "2025-04-04T12:04:23.959Z" }, + { url = "https://files.pythonhosted.org/packages/f6/fa/1a009ce582802a895c0d5fe9413f029c940a0a8ee828657a3bb0acffd88b/pyzmq-26.4.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:237b283044934d26f1eeff4075f751b05d2f3ed42a257fc44386d00df6a270cf", size = 842516, upload-time = "2025-04-04T12:04:25.449Z" }, + { url = "https://files.pythonhosted.org/packages/6e/bc/f88b0bad0f7a7f500547d71e99f10336f2314e525d4ebf576a1ea4a1d903/pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b30f862f6768b17040929a68432c8a8be77780317f45a353cb17e423127d250c", size = 1189183, upload-time = "2025-04-04T12:04:27.035Z" }, + { url = "https://files.pythonhosted.org/packages/d9/8c/db446a3dd9cf894406dec2e61eeffaa3c07c3abb783deaebb9812c4af6a5/pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:c80fcd3504232f13617c6ab501124d373e4895424e65de8b72042333316f64a8", size = 1495501, upload-time = "2025-04-04T12:04:28.833Z" }, + { url = "https://files.pythonhosted.org/packages/05/4c/bf3cad0d64c3214ac881299c4562b815f05d503bccc513e3fd4fdc6f67e4/pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:26a2a7451606b87f67cdeca2c2789d86f605da08b4bd616b1a9981605ca3a364", size = 1395540, upload-time = "2025-04-04T12:04:30.562Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/91/21d3af57bc77e86e9d1e5384f256fd25cdb4c8eed4c45c8119da8120915f/pyzmq-26.4.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a88643de8abd000ce99ca72056a1a2ae15881ee365ecb24dd1d9111e43d57842", size = 1340634, upload-time = "2025-04-04T12:04:47.661Z" }, + { url = "https://files.pythonhosted.org/packages/54/e6/58cd825023e998a0e49db7322b3211e6cf93f0796710b77d1496304c10d1/pyzmq-26.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a744ce209ecb557406fb928f3c8c55ce79b16c3eeb682da38ef5059a9af0848", size = 907880, upload-time = "2025-04-04T12:04:49.294Z" }, + { url = "https://files.pythonhosted.org/packages/72/83/619e44a766ef738cb7e8ed8e5a54565627801bdb027ca6dfb70762385617/pyzmq-26.4.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9434540f333332224ecb02ee6278b6c6f11ea1266b48526e73c903119b2f420f", size = 863003, upload-time = "2025-04-04T12:04:51Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6a/a59af31320598bdc63d2c5a3181d14a89673c2c794540678285482e8a342/pyzmq-26.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c6f0a23e55cd38d27d4c89add963294ea091ebcb104d7fdab0f093bc5abb1c", size = 673432, upload-time = "2025-04-04T12:04:52.611Z" }, + { url = "https://files.pythonhosted.org/packages/29/ae/64dd6c18b08ce2cb009c60f11cf01c87f323acd80344d8b059c0304a7370/pyzmq-26.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6145df55dc2309f6ef72d70576dcd5aabb0fd373311613fe85a5e547c722b780", size = 1205221, upload-time = "2025-04-04T12:04:54.31Z" }, + { url = "https://files.pythonhosted.org/packages/d0/0b/c583ab750957b025244a66948831bc9ca486d11c820da4626caf6480ee1a/pyzmq-26.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2ea81823840ef8c56e5d2f9918e4d571236294fea4d1842b302aebffb9e40997", size = 1515299, upload-time = "2025-04-04T12:04:56.063Z" }, + { url = "https://files.pythonhosted.org/packages/22/ba/95ba76292c49dd9c6dff1f127b4867033020b708d101cba6e4fc5a3d166d/pyzmq-26.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc2abc385dc37835445abe206524fbc0c9e3fce87631dfaa90918a1ba8f425eb", size = 1415366, upload-time = "2025-04-04T12:04:58.241Z" }, + { url = "https://files.pythonhosted.org/packages/6e/65/51abe36169effda26ac7400ffac96f463e09dff40d344cdc2629d9a59162/pyzmq-26.4.0-cp39-cp39-win32.whl", hash = "sha256:41a2508fe7bed4c76b4cf55aacfb8733926f59d440d9ae2b81ee8220633b4d12", size = 580773, upload-time = "2025-04-04T12:04:59.786Z" }, + { url = "https://files.pythonhosted.org/packages/89/68/d9ac94086c63a0ed8d73e9e8aec54b39f481696698a5a939a7207629fb30/pyzmq-26.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4000e8255d6cbce38982e5622ebb90823f3409b7ffe8aeae4337ef7d6d2612a", size = 644340, upload-time = "2025-04-04T12:05:01.389Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8f/66c261d657c1b0791ee5b372c90b1646b453adb581fcdc1dc5c94e5b03e3/pyzmq-26.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f6919d9c120488246bdc2a2f96662fa80d67b35bd6d66218f457e722b3ff64", size = 560075, upload-time = "2025-04-04T12:05:02.975Z" }, + { url = "https://files.pythonhosted.org/packages/47/03/96004704a84095f493be8d2b476641f5c967b269390173f85488a53c1c13/pyzmq-26.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:98d948288ce893a2edc5ec3c438fe8de2daa5bbbd6e2e865ec5f966e237084ba", size = 834408, upload-time = "2025-04-04T12:05:04.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/7f/68d8f3034a20505db7551cb2260248be28ca66d537a1ac9a257913d778e4/pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9f34f5c9e0203ece706a1003f1492a56c06c0632d86cb77bcfe77b56aacf27b", size = 569580, upload-time = "2025-04-04T12:05:06.283Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a6/2b0d6801ec33f2b2a19dd8d02e0a1e8701000fec72926e6787363567d30c/pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80c9b48aef586ff8b698359ce22f9508937c799cc1d2c9c2f7c95996f2300c94", size = 798250, upload-time = "2025-04-04T12:05:07.88Z" }, + { url = "https://files.pythonhosted.org/packages/96/2a/0322b3437de977dcac8a755d6d7ce6ec5238de78e2e2d9353730b297cf12/pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f2a5b74009fd50b53b26f65daff23e9853e79aa86e0aa08a53a7628d92d44a", size = 756758, upload-time = "2025-04-04T12:05:09.483Z" }, + { url = "https://files.pythonhosted.org/packages/c2/33/43704f066369416d65549ccee366cc19153911bec0154da7c6b41fca7e78/pyzmq-26.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:61c5f93d7622d84cb3092d7f6398ffc77654c346545313a3737e266fc11a3beb", size = 555371, upload-time = "2025-04-04T12:05:11.062Z" }, + { url = "https://files.pythonhosted.org/packages/04/52/a70fcd5592715702248306d8e1729c10742c2eac44529984413b05c68658/pyzmq-26.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4478b14cb54a805088299c25a79f27eaf530564a7a4f72bf432a040042b554eb", size = 834405, upload-time = "2025-04-04T12:05:13.3Z" }, + { url = "https://files.pythonhosted.org/packages/25/f9/1a03f1accff16b3af1a6fa22cbf7ced074776abbf688b2e9cb4629700c62/pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a28ac29c60e4ba84b5f58605ace8ad495414a724fe7aceb7cf06cd0598d04e1", size = 569578, upload-time = "2025-04-04T12:05:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/76/0c/3a633acd762aa6655fcb71fa841907eae0ab1e8582ff494b137266de341d/pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b03c1ceea27c6520124f4fb2ba9c647409b9abdf9a62388117148a90419494", size = 798248, upload-time = "2025-04-04T12:05:17.376Z" }, + { url = "https://files.pythonhosted.org/packages/cd/cc/6c99c84aa60ac1cc56747bed6be8ce6305b9b861d7475772e7a25ce019d3/pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7731abd23a782851426d4e37deb2057bf9410848a4459b5ede4fe89342e687a9", size = 756757, upload-time = "2025-04-04T12:05:19.19Z" }, + { url = "https://files.pythonhosted.org/packages/13/9c/d8073bd898eb896e94c679abe82e47506e2b750eb261cf6010ced869797c/pyzmq-26.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a222ad02fbe80166b0526c038776e8042cd4e5f0dec1489a006a1df47e9040e0", size = 555371, upload-time = "2025-04-04T12:05:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/af/b2/71a644b629e1a93ccae9e22a45aec9d23065dfcc24c399cb837f81cd08c2/pyzmq-26.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:552b0d2e39987733e1e9e948a0ced6ff75e0ea39ab1a1db2fc36eb60fd8760db", size = 834397, upload-time = "2025-04-04T12:05:31.217Z" }, + { url = "https://files.pythonhosted.org/packages/a9/dd/052a25651eaaff8f5fd652fb40a3abb400e71207db2d605cf6faf0eac598/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd670a8aa843f2ee637039bbd412e0d7294a5e588e1ecc9ad98b0cdc050259a4", size = 
569571, upload-time = "2025-04-04T12:05:32.877Z" }, + { url = "https://files.pythonhosted.org/packages/a5/5d/201ca10b5d12ab187a418352c06d70c3e2087310af038b11056aba1359be/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d367b7b775a0e1e54a59a2ba3ed4d5e0a31566af97cc9154e34262777dab95ed", size = 798243, upload-time = "2025-04-04T12:05:34.91Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d4/2c64e54749536ad1633400f28d71e71e19375d00ce1fe9bb1123364dc927/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112af16c406e4a93df2caef49f884f4c2bb2b558b0b5577ef0b2465d15c1abc", size = 756751, upload-time = "2025-04-04T12:05:37.12Z" }, + { url = "https://files.pythonhosted.org/packages/08/e6/34d119af43d06a8dcd88bf7a62dac69597eaba52b49ecce76ff06b40f1fd/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76c298683f82669cab0b6da59071f55238c039738297c69f187a542c6d40099", size = 745400, upload-time = "2025-04-04T12:05:40.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/49/b5e471d74a63318e51f30d329b17d2550bdededaab55baed2e2499de7ce4/pyzmq-26.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:49b6ca2e625b46f499fb081aaf7819a177f41eeb555acb05758aa97f4f95d147", size = 555367, upload-time = "2025-04-04T12:05:42.356Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "requests" +version = "2.32.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, +] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" }, +] + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760, upload-time = "2019-10-28T16:00:19.144Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242, upload-time = "2019-10-28T16:00:13.976Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.25.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/a6/60184b7fc00dd3ca80ac635dd5b8577d444c57e8e8742cecabfacb829921/rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3", size = 27304, upload-time = "2025-05-21T12:46:12.502Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/09/e1158988e50905b7f8306487a576b52d32aa9a87f79f7ab24ee8db8b6c05/rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9", size = 373140, upload-time = "2025-05-21T12:42:38.834Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4b/a284321fb3c45c02fc74187171504702b2934bfe16abab89713eedfe672e/rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40", size = 358860, upload-time = "2025-05-21T12:42:41.394Z" }, + { url = "https://files.pythonhosted.org/packages/4e/46/8ac9811150c75edeae9fc6fa0e70376c19bc80f8e1f7716981433905912b/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f", size = 386179, upload-time = "2025-05-21T12:42:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ec/87eb42d83e859bce91dcf763eb9f2ab117142a49c9c3d17285440edb5b69/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b", size = 400282, upload-time = "2025-05-21T12:42:44.92Z" }, + { url = "https://files.pythonhosted.org/packages/68/c8/2a38e0707d7919c8c78e1d582ab15cf1255b380bcb086ca265b73ed6db23/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa", size = 521824, upload-time = "2025-05-21T12:42:46.856Z" }, + { url = "https://files.pythonhosted.org/packages/5e/2c/6a92790243569784dde84d144bfd12bd45102f4a1c897d76375076d730ab/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e", size = 411644, upload-time = "2025-05-21T12:42:48.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/76/66b523ffc84cf47db56efe13ae7cf368dee2bacdec9d89b9baca5e2e6301/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da", size = 386955, upload-time = "2025-05-21T12:42:50.835Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/a362d7522feaa24dc2b79847c6175daa1c642817f4a19dcd5c91d3e2c316/rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380", size = 421039, upload-time = "2025-05-21T12:42:52.348Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c4/b5b6f70b4d719b6584716889fd3413102acf9729540ee76708d56a76fa97/rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9", size = 563290, upload-time = "2025-05-21T12:42:54.404Z" }, + { url = "https://files.pythonhosted.org/packages/87/a3/2e6e816615c12a8f8662c9d8583a12eb54c52557521ef218cbe3095a8afa/rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54", size = 592089, upload-time = "2025-05-21T12:42:55.976Z" }, + { url = "https://files.pythonhosted.org/packages/c0/08/9b8e1050e36ce266135994e2c7ec06e1841f1c64da739daeb8afe9cb77a4/rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2", size = 558400, upload-time = "2025-05-21T12:42:58.032Z" }, + { url = "https://files.pythonhosted.org/packages/f2/df/b40b8215560b8584baccd839ff5c1056f3c57120d79ac41bd26df196da7e/rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24", size = 219741, upload-time = "2025-05-21T12:42:59.479Z" }, + { url = "https://files.pythonhosted.org/packages/10/99/e4c58be18cf5d8b40b8acb4122bc895486230b08f978831b16a3916bd24d/rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a", size = 231553, upload-time = "2025-05-21T12:43:01.425Z" }, + { url = "https://files.pythonhosted.org/packages/95/e1/df13fe3ddbbea43567e07437f097863b20c99318ae1f58a0fe389f763738/rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d", size = 373341, upload-time = "2025-05-21T12:43:02.978Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/deef4d30fcbcbfef3b6d82d17c64490d5c94585a2310544ce8e2d3024f83/rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255", size = 359111, upload-time = "2025-05-21T12:43:05.128Z" }, + { url = "https://files.pythonhosted.org/packages/bb/7e/39f1f4431b03e96ebaf159e29a0f82a77259d8f38b2dd474721eb3a8ac9b/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2", size = 386112, upload-time = "2025-05-21T12:43:07.13Z" }, + { url = "https://files.pythonhosted.org/packages/db/e7/847068a48d63aec2ae695a1646089620b3b03f8ccf9f02c122ebaf778f3c/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0", size = 400362, upload-time = "2025-05-21T12:43:08.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/3d/9441d5db4343d0cee759a7ab4d67420a476cebb032081763de934719727b/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f", size = 522214, upload-time = "2025-05-21T12:43:10.694Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ec/2cc5b30d95f9f1a432c79c7a2f65d85e52812a8f6cbf8768724571710786/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7", size = 411491, upload-time = "2025-05-21T12:43:12.739Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/44695c1f035077a017dd472b6a3253553780837af2fac9b6ac25f6a5cb4d/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd", size = 386978, upload-time = "2025-05-21T12:43:14.25Z" }, + { url = "https://files.pythonhosted.org/packages/b1/74/b4357090bb1096db5392157b4e7ed8bb2417dc7799200fcbaee633a032c9/rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65", size = 420662, upload-time = "2025-05-21T12:43:15.8Z" }, + { url = "https://files.pythonhosted.org/packages/26/dd/8cadbebf47b96e59dfe8b35868e5c38a42272699324e95ed522da09d3a40/rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f", size = 563385, upload-time = "2025-05-21T12:43:17.78Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ea/92960bb7f0e7a57a5ab233662f12152085c7dc0d5468534c65991a3d48c9/rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d", size = 592047, upload-time = "2025-05-21T12:43:19.457Z" }, + { url = "https://files.pythonhosted.org/packages/61/ad/71aabc93df0d05dabcb4b0c749277881f8e74548582d96aa1bf24379493a/rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042", size = 557863, upload-time = "2025-05-21T12:43:21.69Z" }, + { url = "https://files.pythonhosted.org/packages/93/0f/89df0067c41f122b90b76f3660028a466eb287cbe38efec3ea70e637ca78/rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc", size = 219627, upload-time = "2025-05-21T12:43:23.311Z" }, + { url = "https://files.pythonhosted.org/packages/7c/8d/93b1a4c1baa903d0229374d9e7aa3466d751f1d65e268c52e6039c6e338e/rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4", size = 231603, upload-time = "2025-05-21T12:43:25.145Z" }, + { url = "https://files.pythonhosted.org/packages/cb/11/392605e5247bead2f23e6888e77229fbd714ac241ebbebb39a1e822c8815/rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4", size = 223967, upload-time = "2025-05-21T12:43:26.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/81/28ab0408391b1dc57393653b6a0cf2014cc282cc2909e4615e63e58262be/rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c", size = 364647, upload-time = "2025-05-21T12:43:28.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/9a/7797f04cad0d5e56310e1238434f71fc6939d0bc517192a18bb99a72a95f/rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b", size = 350454, upload-time = "2025-05-21T12:43:30.615Z" }, + { url = "https://files.pythonhosted.org/packages/69/3c/93d2ef941b04898011e5d6eaa56a1acf46a3b4c9f4b3ad1bbcbafa0bee1f/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa", size = 389665, upload-time = "2025-05-21T12:43:32.629Z" }, + { url = "https://files.pythonhosted.org/packages/c1/57/ad0e31e928751dde8903a11102559628d24173428a0f85e25e187defb2c1/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda", size = 403873, upload-time = "2025-05-21T12:43:34.576Z" }, + { url = "https://files.pythonhosted.org/packages/16/ad/c0c652fa9bba778b4f54980a02962748479dc09632e1fd34e5282cf2556c/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309", size = 525866, upload-time = "2025-05-21T12:43:36.123Z" }, + { url = "https://files.pythonhosted.org/packages/2a/39/3e1839bc527e6fcf48d5fec4770070f872cdee6c6fbc9b259932f4e88a38/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b", size = 416886, upload-time = "2025-05-21T12:43:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/7a/95/dd6b91cd4560da41df9d7030a038298a67d24f8ca38e150562644c829c48/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea", size = 390666, upload-time = "2025-05-21T12:43:40.065Z" }, + { url = "https://files.pythonhosted.org/packages/64/48/1be88a820e7494ce0a15c2d390ccb7c52212370badabf128e6a7bb4cb802/rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65", size = 425109, upload-time = "2025-05-21T12:43:42.263Z" }, + { url = "https://files.pythonhosted.org/packages/cf/07/3e2a17927ef6d7720b9949ec1b37d1e963b829ad0387f7af18d923d5cfa5/rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c", size = 567244, upload-time = "2025-05-21T12:43:43.846Z" }, + { url = "https://files.pythonhosted.org/packages/d2/e5/76cf010998deccc4f95305d827847e2eae9c568099c06b405cf96384762b/rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd", size = 596023, upload-time = "2025-05-21T12:43:45.932Z" }, + { url = "https://files.pythonhosted.org/packages/52/9a/df55efd84403736ba37a5a6377b70aad0fd1cb469a9109ee8a1e21299a1c/rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb", size = 561634, upload-time = "2025-05-21T12:43:48.263Z" }, + { url = "https://files.pythonhosted.org/packages/ab/aa/dc3620dd8db84454aaf9374bd318f1aa02578bba5e567f5bf6b79492aca4/rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe", size = 222713, upload-time = "2025-05-21T12:43:49.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/7f/7cef485269a50ed5b4e9bae145f512d2a111ca638ae70cc101f661b4defd/rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192", size = 235280, upload-time = "2025-05-21T12:43:51.893Z" }, + { url = "https://files.pythonhosted.org/packages/99/f2/c2d64f6564f32af913bf5f3f7ae41c7c263c5ae4c4e8f1a17af8af66cd46/rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728", size = 225399, upload-time = "2025-05-21T12:43:53.351Z" }, + { url = "https://files.pythonhosted.org/packages/2b/da/323848a2b62abe6a0fec16ebe199dc6889c5d0a332458da8985b2980dffe/rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559", size = 364498, upload-time = "2025-05-21T12:43:54.841Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b4/4d3820f731c80fd0cd823b3e95b9963fec681ae45ba35b5281a42382c67d/rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1", size = 350083, upload-time = "2025-05-21T12:43:56.428Z" }, + { url = "https://files.pythonhosted.org/packages/d5/b1/3a8ee1c9d480e8493619a437dec685d005f706b69253286f50f498cbdbcf/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c", size = 389023, upload-time = "2025-05-21T12:43:57.995Z" }, + { url = "https://files.pythonhosted.org/packages/3b/31/17293edcfc934dc62c3bf74a0cb449ecd549531f956b72287203e6880b87/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb", size = 403283, upload-time = "2025-05-21T12:43:59.546Z" }, + { url = "https://files.pythonhosted.org/packages/d1/ca/e0f0bc1a75a8925024f343258c8ecbd8828f8997ea2ac71e02f67b6f5299/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40", size = 524634, upload-time = "2025-05-21T12:44:01.087Z" }, + { url = "https://files.pythonhosted.org/packages/3e/03/5d0be919037178fff33a6672ffc0afa04ea1cfcb61afd4119d1b5280ff0f/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79", size = 416233, upload-time = "2025-05-21T12:44:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/05/7c/8abb70f9017a231c6c961a8941403ed6557664c0913e1bf413cbdc039e75/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325", size = 390375, upload-time = "2025-05-21T12:44:04.162Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ac/a87f339f0e066b9535074a9f403b9313fd3892d4a164d5d5f5875ac9f29f/rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295", size = 424537, upload-time = "2025-05-21T12:44:06.175Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8f/8d5c1567eaf8c8afe98a838dd24de5013ce6e8f53a01bd47fe8bb06b5533/rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b", size = 566425, upload-time = "2025-05-21T12:44:08.242Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/33/03016a6be5663b389c8ab0bbbcca68d9e96af14faeff0a04affcb587e776/rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98", size = 595197, upload-time = "2025-05-21T12:44:10.449Z" }, + { url = "https://files.pythonhosted.org/packages/33/8d/da9f4d3e208c82fda311bff0cf0a19579afceb77cf456e46c559a1c075ba/rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd", size = 561244, upload-time = "2025-05-21T12:44:12.387Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b3/39d5dcf7c5f742ecd6dbc88f6f84ae54184b92f5f387a4053be2107b17f1/rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31", size = 222254, upload-time = "2025-05-21T12:44:14.261Z" }, + { url = "https://files.pythonhosted.org/packages/5f/19/2d6772c8eeb8302c5f834e6d0dfd83935a884e7c5ce16340c7eaf89ce925/rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500", size = 234741, upload-time = "2025-05-21T12:44:16.236Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/145ada26cfaf86018d0eb304fe55eafdd4f0b6b84530246bb4a7c4fb5c4b/rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5", size = 224830, upload-time = "2025-05-21T12:44:17.749Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ca/d435844829c384fd2c22754ff65889c5c556a675d2ed9eb0e148435c6690/rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129", size = 359668, upload-time = "2025-05-21T12:44:19.322Z" }, + { url = "https://files.pythonhosted.org/packages/1f/01/b056f21db3a09f89410d493d2f6614d87bb162499f98b649d1dbd2a81988/rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d", size = 345649, upload-time = "2025-05-21T12:44:20.962Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0f/e0d00dc991e3d40e03ca36383b44995126c36b3eafa0ccbbd19664709c88/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72", size = 384776, upload-time = "2025-05-21T12:44:22.516Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a2/59374837f105f2ca79bde3c3cd1065b2f8c01678900924949f6392eab66d/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34", size = 395131, upload-time = "2025-05-21T12:44:24.147Z" }, + { url = "https://files.pythonhosted.org/packages/9c/dc/48e8d84887627a0fe0bac53f0b4631e90976fd5d35fff8be66b8e4f3916b/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9", size = 520942, upload-time = "2025-05-21T12:44:25.915Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f5/ee056966aeae401913d37befeeab57a4a43a4f00099e0a20297f17b8f00c/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5", size = 411330, upload-time = "2025-05-21T12:44:27.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/74/b2cffb46a097cefe5d17f94ede7a174184b9d158a0aeb195f39f2c0361e8/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194", size = 387339, upload-time = "2025-05-21T12:44:29.292Z" }, + { url = "https://files.pythonhosted.org/packages/7f/9a/0ff0b375dcb5161c2b7054e7d0b7575f1680127505945f5cabaac890bc07/rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6", size = 418077, upload-time = "2025-05-21T12:44:30.877Z" }, + { url = "https://files.pythonhosted.org/packages/0d/a1/fda629bf20d6b698ae84c7c840cfb0e9e4200f664fc96e1f456f00e4ad6e/rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78", size = 562441, upload-time = "2025-05-21T12:44:32.541Z" }, + { url = "https://files.pythonhosted.org/packages/20/15/ce4b5257f654132f326f4acd87268e1006cc071e2c59794c5bdf4bebbb51/rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72", size = 590750, upload-time = "2025-05-21T12:44:34.557Z" }, + { url = "https://files.pythonhosted.org/packages/fb/ab/e04bf58a8d375aeedb5268edcc835c6a660ebf79d4384d8e0889439448b0/rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66", size = 558891, upload-time = "2025-05-21T12:44:37.358Z" }, + { url = "https://files.pythonhosted.org/packages/90/82/cb8c6028a6ef6cd2b7991e2e4ced01c854b6236ecf51e81b64b569c43d73/rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523", size = 218718, upload-time = "2025-05-21T12:44:38.969Z" }, + { url = "https://files.pythonhosted.org/packages/b6/97/5a4b59697111c89477d20ba8a44df9ca16b41e737fa569d5ae8bff99e650/rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763", size = 232218, upload-time = "2025-05-21T12:44:40.512Z" }, + { url = "https://files.pythonhosted.org/packages/89/74/716d42058ef501e2c08f27aa3ff455f6fc1bbbd19a6ab8dea07e6322d217/rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd", size = 373475, upload-time = "2025-05-21T12:44:42.136Z" }, + { url = "https://files.pythonhosted.org/packages/e1/21/3faa9c523e2496a2505d7440b6f24c9166f37cb7ac027cac6cfbda9b4b5f/rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634", size = 359349, upload-time = "2025-05-21T12:44:43.813Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/c747fe568d21b1d679079b52b926ebc4d1497457510a1773dc5fd4b7b4e2/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be", size = 386526, upload-time = "2025-05-21T12:44:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/0b/cc/4a41703de4fb291f13660fa3d882cbd39db5d60497c6e7fa7f5142e5e69f/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0", size = 400526, upload-time = "2025-05-21T12:44:47.011Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/78/60c980bedcad8418b614f0b4d6d420ecf11225b579cec0cb4e84d168b4da/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908", size = 525726, upload-time = "2025-05-21T12:44:48.838Z" }, + { url = "https://files.pythonhosted.org/packages/3f/37/f2f36b7f1314b3c3200d663decf2f8e29480492a39ab22447112aead4693/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a", size = 412045, upload-time = "2025-05-21T12:44:50.433Z" }, + { url = "https://files.pythonhosted.org/packages/df/96/e03783e87a775b1242477ccbc35895f8e9b2bbdb60e199034a6da03c2687/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9", size = 386953, upload-time = "2025-05-21T12:44:52.092Z" }, + { url = "https://files.pythonhosted.org/packages/7c/7d/1418f4b69bfb4b40481a3d84782113ad7d4cca0b38ae70b982dd5b20102a/rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80", size = 421144, upload-time = "2025-05-21T12:44:53.734Z" }, + { url = "https://files.pythonhosted.org/packages/b3/0e/61469912c6493ee3808012e60f4930344b974fcb6b35c4348e70b6be7bc7/rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a", size = 563730, upload-time = "2025-05-21T12:44:55.846Z" }, + { url = "https://files.pythonhosted.org/packages/f6/86/6d0a5cc56481ac61977b7c839677ed5c63d38cf0fcb3e2280843a8a6f476/rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451", size = 592321, upload-time = "2025-05-21T12:44:57.514Z" }, + { url = "https://files.pythonhosted.org/packages/5d/87/d1e2453fe336f71e6aa296452a8c85c2118b587b1d25ce98014f75838a60/rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f", size = 558162, upload-time = "2025-05-21T12:44:59.564Z" }, + { url = "https://files.pythonhosted.org/packages/ad/92/349f04b1644c5cef3e2e6c53b7168a28531945f9e6fca7425f6d20ddbc3c/rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449", size = 219920, upload-time = "2025-05-21T12:45:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/f2/84/3969bef883a3f37ff2213795257cb7b7e93a115829670befb8de0e003031/rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890", size = 231452, upload-time = "2025-05-21T12:45:02.85Z" }, + { url = "https://files.pythonhosted.org/packages/78/ff/566ce53529b12b4f10c0a348d316bd766970b7060b4fd50f888be3b3b281/rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28", size = 373931, upload-time = "2025-05-21T12:45:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/83/5d/deba18503f7c7878e26aa696e97f051175788e19d5336b3b0e76d3ef9256/rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f", size = 359074, upload-time = "2025-05-21T12:45:06.714Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/74/313415c5627644eb114df49c56a27edba4d40cfd7c92bd90212b3604ca84/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13", size = 387255, upload-time = "2025-05-21T12:45:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c8/c723298ed6338963d94e05c0f12793acc9b91d04ed7c4ba7508e534b7385/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d", size = 400714, upload-time = "2025-05-21T12:45:10.39Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/51f1f6aa653c2e110ed482ef2ae94140d56c910378752a1b483af11019ee/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000", size = 523105, upload-time = "2025-05-21T12:45:12.273Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a4/7873d15c088ad3bff36910b29ceb0f178e4b3232c2adbe9198de68a41e63/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540", size = 411499, upload-time = "2025-05-21T12:45:13.95Z" }, + { url = "https://files.pythonhosted.org/packages/90/f3/0ce1437befe1410766d11d08239333ac1b2d940f8a64234ce48a7714669c/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b", size = 387918, upload-time = "2025-05-21T12:45:15.649Z" }, + { url = "https://files.pythonhosted.org/packages/94/d4/5551247988b2a3566afb8a9dba3f1d4a3eea47793fd83000276c1a6c726e/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e", size = 421705, upload-time = "2025-05-21T12:45:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/b0/25/5960f28f847bf736cc7ee3c545a7e1d2f3b5edaf82c96fb616c2f5ed52d0/rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8", size = 564489, upload-time = "2025-05-21T12:45:19.466Z" }, + { url = "https://files.pythonhosted.org/packages/02/66/1c99884a0d44e8c2904d3c4ec302f995292d5dde892c3bf7685ac1930146/rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8", size = 592557, upload-time = "2025-05-21T12:45:21.362Z" }, + { url = "https://files.pythonhosted.org/packages/55/ae/4aeac84ebeffeac14abb05b3bb1d2f728d00adb55d3fb7b51c9fa772e760/rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11", size = 558691, upload-time = "2025-05-21T12:45:23.084Z" }, + { url = "https://files.pythonhosted.org/packages/41/b3/728a08ff6f5e06fe3bb9af2e770e9d5fd20141af45cff8dfc62da4b2d0b3/rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a", size = 231651, upload-time = "2025-05-21T12:45:24.72Z" }, + { url = "https://files.pythonhosted.org/packages/49/74/48f3df0715a585cbf5d34919c9c757a4c92c1a9eba059f2d334e72471f70/rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954", size = 374208, upload-time = "2025-05-21T12:45:26.306Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/9b01bb11ce01ec03d05e627249cc2c06039d6aa24ea5a22a39c312167c10/rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba", size = 359262, upload-time = "2025-05-21T12:45:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/a9/eb/5395621618f723ebd5116c53282052943a726dba111b49cd2071f785b665/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b", size = 387366, upload-time = "2025-05-21T12:45:30.42Z" }, + { url = "https://files.pythonhosted.org/packages/68/73/3d51442bdb246db619d75039a50ea1cf8b5b4ee250c3e5cd5c3af5981cd4/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038", size = 400759, upload-time = "2025-05-21T12:45:32.516Z" }, + { url = "https://files.pythonhosted.org/packages/b7/4c/3a32d5955d7e6cb117314597bc0f2224efc798428318b13073efe306512a/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9", size = 523128, upload-time = "2025-05-21T12:45:34.396Z" }, + { url = "https://files.pythonhosted.org/packages/be/95/1ffccd3b0bb901ae60b1dd4b1be2ab98bb4eb834cd9b15199888f5702f7b/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1", size = 411597, upload-time = "2025-05-21T12:45:36.164Z" }, + { url = "https://files.pythonhosted.org/packages/ef/6d/6e6cd310180689db8b0d2de7f7d1eabf3fb013f239e156ae0d5a1a85c27f/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762", size = 388053, upload-time = "2025-05-21T12:45:38.45Z" }, + { url = "https://files.pythonhosted.org/packages/4a/87/ec4186b1fe6365ced6fa470960e68fc7804bafbe7c0cf5a36237aa240efa/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e", size = 421821, upload-time = "2025-05-21T12:45:40.732Z" }, + { url = "https://files.pythonhosted.org/packages/7a/60/84f821f6bf4e0e710acc5039d91f8f594fae0d93fc368704920d8971680d/rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692", size = 564534, upload-time = "2025-05-21T12:45:42.672Z" }, + { url = "https://files.pythonhosted.org/packages/41/3a/bc654eb15d3b38f9330fe0f545016ba154d89cdabc6177b0295910cd0ebe/rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf", size = 592674, upload-time = "2025-05-21T12:45:44.533Z" }, + { url = "https://files.pythonhosted.org/packages/2e/ba/31239736f29e4dfc7a58a45955c5db852864c306131fd6320aea214d5437/rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe", size = 558781, upload-time = "2025-05-21T12:45:46.281Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/b2/198266f070c6760e0e8cd00f9f2b9c86133ceebbe7c6d114bdcfea200180/rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b", size = 373973, upload-time = "2025-05-21T12:45:48.081Z" }, + { url = "https://files.pythonhosted.org/packages/13/79/1265eae618f88aa5d5e7122bd32dd41700bafe5a8bcea404e998848cd844/rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23", size = 359326, upload-time = "2025-05-21T12:45:49.825Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/6913b96f3ac072e87e76e45fe938263b0ab0d78b6b2cef3f2e56067befc0/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e", size = 387544, upload-time = "2025-05-21T12:45:51.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/23/129ed12d25229acc6deb8cbe90baadd8762e563c267c9594eb2fcc15be0c/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7", size = 400240, upload-time = "2025-05-21T12:45:54.061Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e0/6811a38a5efa46b7ee6ed2103c95cb9abb16991544c3b69007aa679b6944/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83", size = 525599, upload-time = "2025-05-21T12:45:56.457Z" }, + { url = "https://files.pythonhosted.org/packages/6c/10/2dc88bcaa0d86bdb59e017a330b1972ffeeb7f5061bb5a180c9a2bb73bbf/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b", size = 411154, upload-time = "2025-05-21T12:45:58.525Z" }, + { url = "https://files.pythonhosted.org/packages/cf/d1/a72d522eb7d934fb33e9c501e6ecae00e2035af924d4ff37d964e9a3959b/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf", size = 388297, upload-time = "2025-05-21T12:46:00.264Z" }, + { url = "https://files.pythonhosted.org/packages/55/90/0dd7169ec74f042405b6b73512200d637a3088c156f64e1c07c18aa2fe59/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1", size = 421894, upload-time = "2025-05-21T12:46:02.065Z" }, + { url = "https://files.pythonhosted.org/packages/37/e9/45170894add451783ed839c5c4a495e050aa8baa06d720364d9dff394dac/rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1", size = 564409, upload-time = "2025-05-21T12:46:03.891Z" }, + { url = "https://files.pythonhosted.org/packages/59/d0/31cece9090e76fbdb50c758c165d40da604b03b37c3ba53f010bbfeb130a/rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf", size = 592681, upload-time = "2025-05-21T12:46:06.009Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4c/22ef535efb2beec614ba7be83e62b439eb83b0b0d7b1775e22d35af3f9b5/rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992", size 
= 558744, upload-time = "2025-05-21T12:46:07.78Z" }, + { url = "https://files.pythonhosted.org/packages/79/ff/f2150efc8daf0581d4dfaf0a2a30b08088b6df900230ee5ae4f7c8cd5163/rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793", size = 231305, upload-time = "2025-05-21T12:46:10.52Z" }, +] + +[[package]] +name = "semantic-version" +version = "2.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/31/f2289ce78b9b473d582568c234e104d2a342fd658cc288a7553d83bb8595/semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c", size = 52289, upload-time = "2022-05-26T13:35:23.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/23/8146aad7d88f4fcb3a6218f41a60f6c2d4e3a72de72da1825dc7c8f7877c/semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177", size = 15552, upload-time = "2022-05-26T13:35:21.206Z" }, +] + +[[package]] +name = "send2trash" +version = "1.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/3a/aec9b02217bb79b87bbc1a21bc6abc51e3d5dcf65c30487ac96c0908c722/Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf", size = 17394, upload-time = "2024-04-07T00:01:09.267Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/b0/4562db6223154aa4e22f939003cb92514c79f3d4dccca3444253fd17f902/Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9", size = 18072, upload-time = "2024-04-07T00:01:07.438Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "setuptools-rust" +version = "1.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "semantic-version" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/92/bf8589b1a2b6107cf9ec8daa9954c0b7620643fe1f37d31d75e572d995f5/setuptools_rust-1.11.1.tar.gz", hash = "sha256:7dabc4392252ced314b8050d63276e05fdc5d32398fc7d3cce1f6a6ac35b76c0", size = 310804, upload-time = "2025-04-04T14:28:10.576Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/01/37e1376f80578882e4f2d451f57d1fb42a599832057a123f57d9f26395c8/setuptools_rust-1.11.1-py3-none-any.whl", hash = "sha256:5eaaddaed268dc24a527ffa659ce56b22d3cf17b781247b779efd611031fe8ea", size = 28120, upload-time = "2025-04-04T14:28:09.564Z" }, +] + +[[package]] +name = "setuptools-scm" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "packaging" }, + { name = 
"setuptools" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/19/7ae64b70b2429c48c3a7a4ed36f50f94687d3bfcd0ae2f152367b6410dff/setuptools_scm-8.3.1.tar.gz", hash = "sha256:3d555e92b75dacd037d32bafdf94f97af51ea29ae8c7b234cf94b7a5bd242a63", size = 78088, upload-time = "2025-04-23T11:53:19.739Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/ac/8f96ba9b4cfe3e4ea201f23f4f97165862395e9331a424ed325ae37024a8/setuptools_scm-8.3.1-py3-none-any.whl", hash = "sha256:332ca0d43791b818b841213e76b1971b7711a960761c5bea5fc5cdb5196fbce3", size = 43935, upload-time = "2025-04-23T11:53:17.922Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "slotscheck" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/57/6fcb8df11e7c76eb87b23bfa931408e47f051c6161749c531b4060a45516/slotscheck-0.19.1.tar.gz", hash = "sha256:6146b7747f8db335a00a66b782f86011b74b995f61746dc5b36a9e77d5326013", size = 16050, upload-time = "2024-10-19T13:30:53.369Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/32/bd569256267f80b76b87d21a09795741a175778b954bee1d7b1a89852b6f/slotscheck-0.19.1-py3-none-any.whl", hash = "sha256:bff9926f8d6408ea21b6c6bbaa4389cea1682962e73ee4f30084b6d2b89260ee", size = 16995, upload-time = "2024-10-19T13:30:51.23Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = 
"sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "babel", marker = "python_full_version < '3.10'" }, + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "imagesize", marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "babel", marker = "python_full_version >= '3.10'" }, + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version >= '3.10'" }, + { name = "imagesize", marker = "python_full_version >= '3.10'" }, + { name = "jinja2", marker = "python_full_version >= '3.10'" }, + { name = "packaging", marker = "python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, + { name = "requests", marker = "python_full_version >= '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = 
"2023-03-14T15:01:00.356Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens" }, + { name = "executing" }, + { name = "pure-eval" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, +] + +[[package]] +name = "stdlibs" +version = "2025.5.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/48/6f/92324b26048ff76b02dbb616d26b51a57e751bac7a7934016bb25a407725/stdlibs-2025.5.10.tar.gz", hash = "sha256:75d55a0b7b070ec44bd7dae5bc1ee1a6cea742122fb4253313cb4ab354f7f0c5", size = 19625, upload-time = "2025-05-11T03:46:42.917Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/45/51/a8f17bbb8f01cef657153972a99e382ce5c5e33a1a2df959f3ed2ebe2b89/stdlibs-2025.5.10-py3-none-any.whl", hash = "sha256:25178d9c2b45d2680292413bf59a20293355d45056ec92d32ea6ed349ce9e2a1", size = 57264, upload-time = "2025-05-11T03:46:41.633Z" }, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, +] + +[[package]] +name = "terminado" +version = "0.18.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess", marker = "os_name != 'nt'" }, + { name = "pywinpty", marker = "os_name == 'nt'" }, + { name = "tornado" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701, upload-time = "2024-03-12T14:34:39.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154, upload-time = "2024-03-12T14:34:36.569Z" }, +] + +[[package]] +name = "testslide" +version = "2.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "psutil" }, + { name = "pygments" }, + { name = "typeguard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ee/6f/c8d6d60a597c693559dab3b3362bd01e2212530e9a163eb0164af81e1ec1/TestSlide-2.7.1.tar.gz", hash = "sha256:d25890d5c383f673fac44a5f9e2561b7118d04f29f2c2b3d4f549e6db94cb34d", size = 50255, upload-time = "2023-03-16T14:09:41.204Z" } + +[[package]] +name = "tinycss2" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { 
url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "tomlkit" +version = "0.13.3" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207, upload-time = "2025-06-05T07:13:44.947Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" }, +] + +[[package]] +name = "tornado" +version = "6.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/89/c72771c81d25d53fe33e3dca61c233b665b2780f21820ba6fd2c6793c12b/tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c", size = 509934, upload-time = "2025-05-22T18:15:38.788Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/89/f4532dee6843c9e0ebc4e28d4be04c67f54f60813e4bf73d595fe7567452/tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7", size = 441948, upload-time = "2025-05-22T18:15:20.862Z" }, + { url = "https://files.pythonhosted.org/packages/15/9a/557406b62cffa395d18772e0cdcf03bed2fff03b374677348eef9f6a3792/tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6", size = 440112, upload-time = "2025-05-22T18:15:22.591Z" }, + { url = "https://files.pythonhosted.org/packages/55/82/7721b7319013a3cf881f4dffa4f60ceff07b31b394e459984e7a36dc99ec/tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888", size = 443672, upload-time = "2025-05-22T18:15:24.027Z" }, + { url = "https://files.pythonhosted.org/packages/7d/42/d11c4376e7d101171b94e03cef0cbce43e823ed6567ceda571f54cf6e3ce/tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331", size = 443019, upload-time = "2025-05-22T18:15:25.735Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f7/0c48ba992d875521ac761e6e04b0a1750f8150ae42ea26df1852d6a98942/tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e", size = 443252, upload-time = "2025-05-22T18:15:27.499Z" }, + { url = "https://files.pythonhosted.org/packages/89/46/d8d7413d11987e316df4ad42e16023cd62666a3c0dfa1518ffa30b8df06c/tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401", size = 443930, upload-time = "2025-05-22T18:15:29.299Z" }, + { url = "https://files.pythonhosted.org/packages/78/b2/f8049221c96a06df89bed68260e8ca94beca5ea532ffc63b1175ad31f9cc/tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692", size = 443351, upload-time = "2025-05-22T18:15:31.038Z" }, + { url = "https://files.pythonhosted.org/packages/76/ff/6a0079e65b326cc222a54720a748e04a4db246870c4da54ece4577bfa702/tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a", size = 443328, upload-time = "2025-05-22T18:15:32.426Z" }, + { url = "https://files.pythonhosted.org/packages/49/18/e3f902a1d21f14035b5bc6246a8c0f51e0eef562ace3a2cea403c1fb7021/tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365", size = 444396, upload-time = "2025-05-22T18:15:34.205Z" }, + { url = "https://files.pythonhosted.org/packages/7b/09/6526e32bf1049ee7de3bebba81572673b19a2a8541f795d887e92af1a8bc/tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b", size = 444840, upload-time = "2025-05-22T18:15:36.1Z" }, + { url = "https://files.pythonhosted.org/packages/55/a7/535c44c7bea4578e48281d83c615219f3ab19e6abc67625ef637c73987be/tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7", size = 443596, upload-time = "2025-05-22T18:15:37.433Z" }, +] + +[[package]] +name = "trailrunner" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pathspec" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/93/630e10bacd897daeb9ff5a408f4e7cb0fc2f243e7e3ef00f9e6cf319b11c/trailrunner-1.4.0.tar.gz", hash = "sha256:3fe61e259e6b2e5192f321c265985b7a0dc18497ced62b2da244f08104978398", size = 15836, upload-time = "2023-03-27T07:54:35.515Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/29/21001afea86bac5016c3940b43de3ce4786b0d8337d4ea79bb903c649ce3/trailrunner-1.4.0-py3-none-any.whl", hash = "sha256:a286d39f2723f28d167347f41cf8f232832648709366e722f55cf5545772a48e", size = 11071, upload-time = "2023-03-27T07:54:32.514Z" }, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, +] + +[[package]] +name = "typeguard" +version = "2.13.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/38/c61bfcf62a7b572b5e9363a802ff92559cb427ee963048e1442e3aef7490/typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", size = 40604, upload-time = "2021-12-10T21:09:39.158Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/bb/d43e5c75054e53efce310e79d63df0ac3f25e34c926be5dffb7d283fb2a8/typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1", size = 17605, upload-time = "2021-12-10T21:09:37.844Z" }, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20250516" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/88/d65ed807393285204ab6e2801e5d11fbbea811adcaa979a2ed3b67a5ef41/types_python_dateutil-2.9.0.20250516.tar.gz", hash = 
"sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5", size = 13943, upload-time = "2025-05-16T03:06:58.385Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/3f/b0e8db149896005adc938a1e7f371d6d7e9eca4053a29b108978ed15e0c2/types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93", size = 14356, upload-time = "2025-05-16T03:06:57.249Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, +] + +[[package]] +name = "ufmt" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "black" }, + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "libcst" }, + { name = "moreorless" }, + { name = "tomlkit" }, + { name = "trailrunner" }, + { name = "typing-extensions" }, + { name = "usort" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/f8/c25e242a8e12062172dea4117859757a11339bbc39b1a3c7fb6a6de03bb2/ufmt-2.8.0.tar.gz", hash = "sha256:72c9502915497678de9aeab8aa18604890f14f869f7f378dd26e2878bde84f13", size = 24482, upload-time = "2024-10-25T06:21:57.239Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/4b/3f1b6f566b6cf70ccc5cba9a638fe4459f1e373c34d74df2e40e41871d70/ufmt-2.8.0-py3-none-any.whl", hash = "sha256:47a690811c576ebd3a0e30d77d43b65c84240e5c1611e5cb4a880bdd7f4507c1", size = 28268, upload-time = "2024-10-25T06:21:55.822Z" }, +] + +[[package]] +name = "uri-template" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678, upload-time = "2023-06-21T01:49:05.374Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" }, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, +] + +[[package]] +name = "usort" +version = "1.0.8.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "libcst" }, + { name = "moreorless" }, + { name = "stdlibs" }, + { name = "toml" }, + { name = "trailrunner" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/f4/3ef48b43f2645f2cb4a37d6007e611bc669af44eecfee953c5dd57433011/usort-1.0.8.post1.tar.gz", hash = "sha256:68def75f2b20b97390c552c503e071ee06c65ad502c5f94f3bd03f095cf4dfe6", size = 83215, upload-time = "2024-02-12T04:29:33.632Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/55/cc51ceb3d93763b9d28def24615bc485212525550967ce9e992a455f9ab5/usort-1.0.8.post1-py3-none-any.whl", hash = "sha256:6c57cdf17b458c79f8a61eb3ce8bf3f93e36d3c2edd602b9b2aa16b6875d3255", size = 37281, upload-time = "2024-02-12T04:29:31.693Z" }, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, +] + +[[package]] +name = "webcolors" +version = "24.11.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/29/061ec845fb58521848f3739e466efd8250b4b7b98c1b6c5bf4d40b419b7e/webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6", size = 45064, upload-time = "2024-11-11T07:43:24.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/e8/c0e05e4684d13459f93d312077a9a2efbe04d59c393bc2b8802248c908d4/webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9", size = 14934, upload-time = 
"2024-11-11T07:43:22.529Z" }, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, +] + +[[package]] +name = "widgetsnbextension" +version = "4.0.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/53/2e0253c5efd69c9656b1843892052a31c36d37ad42812b5da45c62191f7e/widgetsnbextension-4.0.14.tar.gz", hash = "sha256:a3629b04e3edb893212df862038c7232f62973373869db5084aed739b437b5af", size = 1097428, upload-time = "2025-04-10T13:01:25.628Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/51/5447876806d1088a0f8f71e16542bf350918128d0a69437df26047c8e46f/widgetsnbextension-4.0.14-py3-none-any.whl", hash = "sha256:4875a9eaf72fbf5079dc372a51a9f268fc38d46f767cbf85c43a36da5cb9b575", size = 2196503, upload-time = "2025-04-10T13:01:23.086Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From 67ba746bed86e873065cf170c462894fcb367862 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Thu, 12 Jun 2025 18:57:20 +0800 Subject: [PATCH 602/632] fix(dependency): add back typing-extensions for 3.9 (#1358) Missing typing-extensions breaks "from libcst.codemod import CodemodContext" --- pyproject.toml | 1 + uv.lock | 2 ++ 2 files changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index b4b15320..6b78811e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ requires-python = ">=3.9" dependencies = [ "pyyaml>=5.2; python_version < '3.13'", "pyyaml-ft>=8.0.0; python_version >= '3.13'", + "typing-extensions; python_version 
< '3.10'", ] [project.urls] diff --git a/uv.lock b/uv.lock index ad68fefe..2cc4db06 100644 --- a/uv.lock +++ b/uv.lock @@ -1207,6 +1207,7 @@ source = { editable = "." } dependencies = [ { name = "pyyaml", marker = "python_full_version < '3.13'" }, { name = "pyyaml-ft", marker = "python_full_version >= '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] [package.dev-dependencies] @@ -1257,6 +1258,7 @@ docs = [ requires-dist = [ { name = "pyyaml", marker = "python_full_version < '3.13'", specifier = ">=5.2" }, { name = "pyyaml-ft", marker = "python_full_version >= '3.13'", specifier = ">=8.0.0" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] [package.metadata.requires-dev] From 03285dd4bf655d3bf4d89989dfde0cd885abefc3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 13 Jun 2025 21:36:01 +0100 Subject: [PATCH 603/632] bump version to 1.8.2 (#1360) --- CHANGELOG.md | 10 ++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 945d056d..3a8fb401 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ +# 1.8.2 - 2025-06-13 + +# Fixed +* fix(dependency): add back typing-extensions for 3.9 by @Lee-W in https://github.com/Instagram/LibCST/pull/1358 + +## New Contributors +* @Lee-W made their first contribution in https://github.com/Instagram/LibCST/pull/1358 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.1...v1.8.2 + # 1.8.1 - 2025-06-10 ## Added diff --git a/native/Cargo.lock b/native/Cargo.lock index 9501a525..c282c17f 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.8.1" +version = "1.8.2" dependencies = [ "annotate-snippets", "criterion", @@ -304,7 +304,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.1" +version = "1.8.2" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 2e0b7be3..1615a584 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.8.1" +version = "1.8.2" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.1" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.1" } +libcst_derive = { path = "../libcst_derive", version = "1.8.2" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 566f74e2..f538e46e 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.8.1" +version = "1.8.2" edition = "2018" description = "Proc macro helpers for libcst." 
license = "MIT" From 287ab059a02bcaca00e77dceaaefc7c90954eb3b Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 15 Jun 2025 11:46:04 +0100 Subject: [PATCH 604/632] bump pyo3 to 0.25.1 (#1361) --- native/Cargo.lock | 20 ++++++++++---------- native/libcst/Cargo.toml | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index c282c17f..ee5f0d57 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -443,9 +443,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f239d656363bcee73afef85277f1b281e8ac6212a1d42aa90e55b90ed43c47a4" +checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a" dependencies = [ "indoc", "libc", @@ -460,9 +460,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755ea671a1c34044fa165247aaf6f419ca39caa6003aee791a0df2713d8f1b6d" +checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598" dependencies = [ "once_cell", "target-lexicon", @@ -470,9 +470,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc95a2e67091e44791d4ea300ff744be5293f394f1bafd9f78c080814d35956e" +checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c" dependencies = [ "libc", "pyo3-build-config", @@ -480,9 +480,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a179641d1b93920829a62f15e87c0ed791b6c8db2271ba0fd7c2686090510214" +checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -492,9 +492,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dff85ebcaab8c441b0e3f7ae40a6963ecea8a9f5e74f647e33fcf5ec9a1e89e" +checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc" dependencies = [ "heck", "proc-macro2", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 1615a584..5b9f2f86 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" -pyo3 = { version = "0.25", optional = true } +pyo3 = { version = "0.25.1", optional = true } thiserror = "2.0.12" peg = "0.8.5" annotate-snippets = "0.11.5" From 4bc2116d2a6d4c48f263a1838c2b2d8c2d1dbea4 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Sun, 15 Jun 2025 12:39:36 +0100 Subject: [PATCH 605/632] ci: test built wheels (#1359) * bump uv version * bump cibuildwheel to v3 * enable GIL for smoke tests for now --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 12 ++++++++---- .github/workflows/pypi_upload.yml | 2 +- pyproject.toml | 6 ++++++ 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 921e871e..065ef68f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: github.event_name != 'release' run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: 
pypa/cibuildwheel@v3.0.0rc2 + uses: pypa/cibuildwheel@v3.0.0 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce969e24..5e176f65 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v5 with: - version: "0.7.12" + version: "0.7.13" python-version: ${{ matrix.python-version }} - uses: actions/checkout@v4 with: @@ -60,7 +60,7 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v5 with: - version: "0.7.12" + version: "0.7.13" python-version: "3.10" - run: uv run poe lint - run: uv run poe fixtures @@ -76,7 +76,7 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v5 with: - version: "0.7.12" + version: "0.7.13" python-version: "3.10" - run: uv run poe typecheck @@ -91,7 +91,7 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v5 with: - version: "0.7.12" + version: "0.7.13" python-version: "3.10" - uses: ts-graphviz/setup-graphviz@v2 - run: uv run --group docs poe docs @@ -143,3 +143,7 @@ jobs: - run: rustup component add rustfmt - name: format run: cargo fmt --all --manifest-path=native/Cargo.toml -- --check + build: + # only trigger here for pull requests - regular pushes are handled in pypi_upload + if: ${{ github.event_name == 'pull_request' }} + uses: Instagram/LibCST/.github/workflows/build.yml@main diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 3a16857a..92145c1d 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -36,7 +36,7 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v5 with: - version: "0.7.12" + version: "0.7.13" enable-cache: false - name: Build a source tarball env: diff --git a/pyproject.toml b/pyproject.toml index 6b78811e..3ebaaef1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,6 +109,12 @@ skip = [ "*-musllinux_armv7l", ] enable = ["cpython-freethreading"] +test-command = [ + "python --version", + "python -m libcst.tool list", + # TODO: remove the gil once thread-safety issues are resolved + "python -X gil=1 -m libcst.tool codemod remove_unused_imports.RemoveUnusedImportsCommand {project}/libcst/_nodes", +] [tool.cibuildwheel.linux] environment-pass = ["LIBCST_NO_LOCAL_SCHEME"] From 2fb4b2dd58537952b709f3730850b651c1dd4974 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Jun 2025 07:49:28 +0100 Subject: [PATCH 606/632] build(deps): bump astral-sh/setup-uv from 5 to 6 (#1365) Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 5 to 6. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v5...v6) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 8 ++++---- .github/workflows/pypi_upload.yml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5e176f65..5147f457 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ jobs: - "3.14t" steps: - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: "0.7.13" python-version: ${{ matrix.python-version }} @@ -58,7 +58,7 @@ jobs: fetch-depth: 0 persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: "0.7.13" python-version: "3.10" @@ -74,7 +74,7 @@ jobs: fetch-depth: 0 persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: "0.7.13" python-version: "3.10" @@ -89,7 +89,7 @@ jobs: fetch-depth: 0 persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: "0.7.13" python-version: "3.10" diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 92145c1d..fd69d5fc 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -34,7 +34,7 @@ jobs: with: python-version: "3.10" - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: "0.7.13" enable-cache: false From 2931c86e07097ca33afe6269024e462bb996aaf7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Jul 2025 16:44:33 -0400 Subject: [PATCH 607/632] build(deps): bump pypa/cibuildwheel from 3.0.0 to 3.0.1 (#1373) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.0.0 to 3.0.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v3.0.0...v3.0.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-version: 3.0.1 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 065ef68f..bffe5fca 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: github.event_name != 'release' run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v3.0.0 + uses: pypa/cibuildwheel@v3.0.1 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From aa539604585d263033c777e78d01a438d13e17c9 Mon Sep 17 00:00:00 2001 From: Hunter Hogan Date: Tue, 15 Jul 2025 14:22:23 -0500 Subject: [PATCH 608/632] Fix typos in tutorial.ipynb (#1378) --- docs/source/tutorial.ipynb | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/source/tutorial.ipynb b/docs/source/tutorial.ipynb index 3e6bdac0..1b1ad00d 100644 --- a/docs/source/tutorial.ipynb +++ b/docs/source/tutorial.ipynb @@ -10,7 +10,7 @@ "Parsing and Visiting\n", "====================\n", "\n", - "LibCST provides helpers to parse source code string as concrete syntax tree. 
In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use visitor pattern to traverse the tree. In this tutorial, we demonstrate a common four-step-workflow to build an automated refactoring (codemod) application:\n", + "LibCST provides helpers to parse source code string as a concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use the visitor pattern to traverse the tree. In this tutorial, we demonstrate a common four-step-workflow to build an automated refactoring (codemod) application:\n", "\n", "1. `Parse Source Code <#Parse-Source-Code>`_\n", "2. `Display The Source Code CST <#Display-Source-Code-CST>`_\n", @@ -19,7 +19,7 @@ "\n", "Parse Source Code\n", "=================\n", - "LibCST provides various helpers to parse source code as concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing ` for more detail)." + "LibCST provides various helpers to parse source code as a concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing ` for more detail)." ] }, { @@ -90,7 +90,7 @@ "|\n", "Example: add typing annotation from pyi stub file to Python source\n", "------------------------------------------------------------------\n", - "Python `typing annotation `_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easliy using LibCST. The first step is to parse the pyi stub and source files as trees." + "Python `typing annotation `_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easily using LibCST. The first step is to parse the pyi stub and source files as trees." ] }, { @@ -106,7 +106,7 @@ " self._replace(type=self.type.name))\n", "\n", "def tokenize(code, version_info, start_pos=(1, 0)):\n", - " \"\"\"Generate tokens from a the source code (string).\"\"\"\n", + " \"\"\"Generate tokens from the source code (string).\"\"\"\n", " lines = split_lines(code, keepends=True)\n", " return tokenize_lines(lines, version_info, start_pos=start_pos)\n", "'''\n", @@ -134,7 +134,7 @@ "Build Visitor or Transformer\n", "============================\n", "For traversing and modifying the tree, LibCST provides Visitor and Transformer classes similar to the `ast module `_. To implement a visitor (read only) or transformer (read/write), simply implement a subclass of :class:`~libcst.CSTVisitor` or :class:`~libcst.CSTTransformer` (see :doc:`Visitors ` for more detail).\n", - "In the typing example, we need to implement a visitor to collect typing annotation from the stub tree and a transformer to copy the annotation to the function signature. In the visitor, we implement ``visit_FunctionDef`` to collect annotations. Later in the transformer, we implement ``leave_FunctionDef`` to add the collected annotations." 
+ "In the typing example, we need to implement a visitor to collect typing annotations from the stub tree and a transformer to copy the annotation to the function signature. In the visitor, we implement ``visit_FunctionDef`` to collect annotations. Later in the transformer, we implement ``leave_FunctionDef`` to add the collected annotations." ] }, { @@ -226,7 +226,7 @@ "|\n", "Generate Source Code\n", "====================\n", - "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt `_ to reformate the code to keep a consistent coding style." + "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt `_ to reformat the code to keep a consistent coding style." ] }, { From 9542fc3882a7b16dc05a7248dcd10647f784f624 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Wed, 30 Jul 2025 16:27:20 +0000 Subject: [PATCH 609/632] remove entry points to pure parser (#1375) * rm: ci * rm: entry point * fix: tests * fix: remove combine step from ci * linter fixes * omit the _parser * fix newlines * fix: remove optional * fix: linter --------- Co-authored-by: thereversiblewheel --- .github/workflows/ci.yml | 9 +-- libcst/_nodes/tests/test_atom.py | 3 +- libcst/_nodes/tests/test_binary_op.py | 3 +- libcst/_nodes/tests/test_classdef.py | 3 - libcst/_nodes/tests/test_dict.py | 3 +- libcst/_nodes/tests/test_funcdef.py | 11 +--- libcst/_nodes/tests/test_list.py | 3 +- libcst/_nodes/tests/test_match.py | 7 +- libcst/_nodes/tests/test_matrix_multiply.py | 3 +- libcst/_nodes/tests/test_module.py | 4 +- libcst/_nodes/tests/test_set.py | 3 +- libcst/_nodes/tests/test_try.py | 7 +- libcst/_nodes/tests/test_tuple.py | 3 +- libcst/_nodes/tests/test_type_alias.py | 5 -- libcst/_nodes/tests/test_with.py | 14 ++-- libcst/_nodes/tests/test_yield.py | 3 +- libcst/_parser/entrypoints.py | 69 ++++---------------- libcst/_parser/tests/test_parse_errors.py | 3 - libcst/codemod/tests/test_codemod_cli.py | 15 ++--- libcst/metadata/tests/test_scope_provider.py | 15 ----- libcst/tests/__main__.py | 5 -- libcst/tests/test_roundtrip.py | 4 +- pyproject.toml | 1 + 23 files changed, 40 insertions(+), 156 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5147f457..4b0fa767 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,15 +39,8 @@ jobs: run: uv sync --locked --dev - name: Native Parser Tests run: uv run poe test - - name: Pure Parser Tests - env: - COVERAGE_FILE: .coverage.pure - LIBCST_PARSER_TYPE: pure - run: uv run poe test - name: Coverage - run: | - uv run coverage combine .coverage.pure - uv run coverage report + run: uv run coverage report # Run linters lint: diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index 82f7ab99..a33732c2 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -9,7 +9,6 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -1184,7 +1183,7 @@ class AtomTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not 
kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_binary_op.py b/libcst/_nodes/tests/test_binary_op.py index b86af9fe..f6b40daf 100644 --- a/libcst/_nodes/tests/test_binary_op.py +++ b/libcst/_nodes/tests/test_binary_op.py @@ -8,7 +8,6 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -189,4 +188,4 @@ class BinaryOperationTest(CSTNodeTest): ) ) def test_parse_error(self, **kwargs: Any) -> None: - self.assert_parses(**kwargs, expect_success=not is_native()) + self.assert_parses(**kwargs, expect_success=False) diff --git a/libcst/_nodes/tests/test_classdef.py b/libcst/_nodes/tests/test_classdef.py index cca36fbb..2e026a6c 100644 --- a/libcst/_nodes/tests/test_classdef.py +++ b/libcst/_nodes/tests/test_classdef.py @@ -8,7 +8,6 @@ from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -210,8 +209,6 @@ class ClassDefCreationTest(CSTNodeTest): ) ) def test_valid_native(self, **kwargs: Any) -> None: - if not is_native(): - self.skipTest("Disabled for pure python parser") self.validate_node(**kwargs) @data_provider( diff --git a/libcst/_nodes/tests/test_dict.py b/libcst/_nodes/tests/test_dict.py index 1ee33332..47cb0663 100644 --- a/libcst/_nodes/tests/test_dict.py +++ b/libcst/_nodes/tests/test_dict.py @@ -8,7 +8,6 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -188,6 +187,6 @@ class DictTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 65a0ff07..4ed7fcc3 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -8,7 +8,6 @@ from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -741,8 +740,6 @@ class FunctionDefCreationTest(CSTNodeTest): ) ) def test_valid(self, **kwargs: Any) -> None: - if not is_native() and kwargs.get("native_only", False): - self.skipTest("Disabled for native parser") if "native_only" in kwargs: kwargs.pop("native_only") self.validate_node(**kwargs) @@ -891,8 +888,6 @@ class FunctionDefCreationTest(CSTNodeTest): ) ) def test_valid_native(self, **kwargs: Any) -> None: - if not is_native(): - self.skipTest("Disabled for pure python parser") self.validate_node(**kwargs) @data_provider( @@ -2223,8 +2218,6 @@ class FunctionDefParserTest(CSTNodeTest): ) ) 
def test_valid_38(self, node: cst.CSTNode, code: str, **kwargs: Any) -> None: - if not is_native() and kwargs.get("native_only", False): - self.skipTest("disabled for pure python parser") self.validate_node(node, code, _parse_statement_force_38) @data_provider( @@ -2252,7 +2245,7 @@ class FunctionDefParserTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) @@ -2271,6 +2264,4 @@ class FunctionDefParserTest(CSTNodeTest): ) ) def test_parse_error(self, **kwargs: Any) -> None: - if not is_native(): - self.skipTest("Skipped for non-native parser") self.assert_parses(**kwargs, expect_success=False, parser=parse_statement) diff --git a/libcst/_nodes/tests/test_list.py b/libcst/_nodes/tests/test_list.py index 43e22df7..2f96124c 100644 --- a/libcst/_nodes/tests/test_list.py +++ b/libcst/_nodes/tests/test_list.py @@ -8,7 +8,6 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression, parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -126,6 +125,6 @@ class ListTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py index 2f1e4193..2335b7c3 100644 --- a/libcst/_nodes/tests/test_match.py +++ b/libcst/_nodes/tests/test_match.py @@ -3,17 +3,14 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-from typing import Any, Callable, Optional +from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest -from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider -parser: Optional[Callable[[str], cst.CSTNode]] = ( - parse_statement if is_native() else None -) +parser: Callable[[str], cst.CSTNode] = parse_statement class MatchTest(CSTNodeTest): diff --git a/libcst/_nodes/tests/test_matrix_multiply.py b/libcst/_nodes/tests/test_matrix_multiply.py index 5b4b8668..500b7aab 100644 --- a/libcst/_nodes/tests/test_matrix_multiply.py +++ b/libcst/_nodes/tests/test_matrix_multiply.py @@ -11,7 +11,6 @@ from libcst._nodes.tests.base import ( parse_expression_as, parse_statement_as, ) -from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider @@ -70,6 +69,6 @@ class NamedExprTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_module.py b/libcst/_nodes/tests/test_module.py index 5b33c6b7..40de8f8e 100644 --- a/libcst/_nodes/tests/test_module.py +++ b/libcst/_nodes/tests/test_module.py @@ -8,7 +8,7 @@ from typing import cast, Tuple import libcst as cst from libcst import parse_module, parse_statement from libcst._nodes.tests.base import CSTNodeTest -from libcst._parser.entrypoints import is_native + from libcst.metadata import CodeRange, MetadataWrapper, PositionProvider from libcst.testing.utils import data_provider @@ -117,7 +117,7 @@ class ModuleTest(CSTNodeTest): def test_parser( self, *, code: str, expected: cst.Module, enabled_for_native: bool = True ) -> None: - if is_native() and not enabled_for_native: + if not enabled_for_native: self.skipTest("Disabled for native parser") self.assertEqual(parse_module(code), expected) diff --git a/libcst/_nodes/tests/test_set.py b/libcst/_nodes/tests/test_set.py index 335a4d3a..699b458a 100644 --- a/libcst/_nodes/tests/test_set.py +++ b/libcst/_nodes/tests/test_set.py @@ -8,7 +8,6 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as -from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider @@ -133,6 +132,6 @@ class ListTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index a2e8a433..5704d098 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -3,18 +3,15 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-from typing import Any, Callable, Optional +from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider -native_parse_statement: Optional[Callable[[str], cst.CSTNode]] = ( - parse_statement if is_native() else None -) +native_parse_statement: Callable[[str], cst.CSTNode] = parse_statement class TryTest(CSTNodeTest): diff --git a/libcst/_nodes/tests/test_tuple.py b/libcst/_nodes/tests/test_tuple.py index 0055055c..aa3d68bb 100644 --- a/libcst/_nodes/tests/test_tuple.py +++ b/libcst/_nodes/tests/test_tuple.py @@ -8,7 +8,6 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression, parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -286,6 +285,6 @@ class TupleTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_type_alias.py b/libcst/_nodes/tests/test_type_alias.py index aa26103b..865135c1 100644 --- a/libcst/_nodes/tests/test_type_alias.py +++ b/libcst/_nodes/tests/test_type_alias.py @@ -8,7 +8,6 @@ from typing import Any import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -132,8 +131,6 @@ class TypeAliasCreationTest(CSTNodeTest): ) ) def test_valid(self, **kwargs: Any) -> None: - if not is_native(): - self.skipTest("Disabled in the old parser") self.validate_node(**kwargs) @@ -252,6 +249,4 @@ class TypeAliasParserTest(CSTNodeTest): ) ) def test_valid(self, **kwargs: Any) -> None: - if not is_native(): - self.skipTest("Disabled in the old parser") self.validate_node(**kwargs) diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index 517ce357..0b396619 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -7,9 +7,7 @@ from typing import Any import libcst as cst from libcst import parse_statement, PartialParserConfig -from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as -from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -187,14 +185,14 @@ class WithTest(CSTNodeTest): cst.WithItem( cst.Call( cst.Name("context_mgr"), - lpar=() if is_native() else (cst.LeftParen(),), - rpar=() if is_native() else (cst.RightParen(),), + lpar=(), + rpar=(), ) ), ), cst.SimpleStatementSuite((cst.Pass(),)), - lpar=(cst.LeftParen() if is_native() else MaybeSentinel.DEFAULT), - rpar=(cst.RightParen() if is_native() else MaybeSentinel.DEFAULT), + lpar=(cst.LeftParen()), + rpar=(cst.RightParen()), whitespace_after_with=cst.SimpleWhitespace(""), ), "code": "with(context_mgr()): pass\n", @@ -233,7 +231,7 @@ class WithTest(CSTNodeTest): rpar=cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")), ), "code": ("with ( 
foo(),\n" " bar(), ): pass\n"), # noqa - "parser": parse_statement if is_native() else None, + "parser": parse_statement, "expected_position": CodeRange((1, 0), (2, 21)), }, ) @@ -310,7 +308,7 @@ class WithTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_yield.py b/libcst/_nodes/tests/test_yield.py index 22a18872..e5085b4d 100644 --- a/libcst/_nodes/tests/test_yield.py +++ b/libcst/_nodes/tests/test_yield.py @@ -8,7 +8,6 @@ from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_statement_as -from libcst._parser.entrypoints import is_native from libcst.helpers import ensure_type from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -241,6 +240,6 @@ class YieldParsingTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if is_native() and not kwargs.get("expect_success", True): + if not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index d9cee5e9..d034258c 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -9,27 +9,20 @@ parser. A parser entrypoint should take the source code and some configuration information """ -import os from functools import partial from typing import Union +from libcst import native from libcst._nodes.base import CSTNode from libcst._nodes.expression import BaseExpression from libcst._nodes.module import Module from libcst._nodes.statement import BaseCompoundStatement, SimpleStatementLine -from libcst._parser.detect_config import convert_to_utf8, detect_config -from libcst._parser.grammar import get_grammar, validate_grammar -from libcst._parser.python_parser import PythonCSTParser +from libcst._parser.detect_config import convert_to_utf8 from libcst._parser.types.config import PartialParserConfig _DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig() -def is_native() -> bool: - typ = os.environ.get("LIBCST_PARSER_TYPE") - return typ != "pure" - - def _parse( entrypoint: str, source: Union[str, bytes], @@ -38,57 +31,19 @@ def _parse( detect_trailing_newline: bool, detect_default_newline: bool, ) -> CSTNode: - if is_native(): - from libcst.native import parse_expression, parse_module, parse_statement - encoding, source_str = convert_to_utf8(source, partial=config) + encoding, source_str = convert_to_utf8(source, partial=config) - if entrypoint == "file_input": - parse = partial(parse_module, encoding=encoding) - elif entrypoint == "stmt_input": - parse = parse_statement - elif entrypoint == "expression_input": - parse = parse_expression - else: - raise ValueError(f"Unknown parser entry point: {entrypoint}") + if entrypoint == "file_input": + parse = partial(native.parse_module, encoding=encoding) + elif entrypoint == "stmt_input": + parse = native.parse_statement + elif entrypoint == "expression_input": + parse = native.parse_expression + else: + raise ValueError(f"Unknown parser entry point: {entrypoint}") - return parse(source_str) - return _pure_python_parse( - entrypoint, - source, - config, - detect_trailing_newline=detect_trailing_newline, - 
detect_default_newline=detect_default_newline, - ) - - -def _pure_python_parse( - entrypoint: str, - source: Union[str, bytes], - config: PartialParserConfig, - *, - detect_trailing_newline: bool, - detect_default_newline: bool, -) -> CSTNode: - detection_result = detect_config( - source, - partial=config, - detect_trailing_newline=detect_trailing_newline, - detect_default_newline=detect_default_newline, - ) - validate_grammar() - grammar = get_grammar(config.parsed_python_version, config.future_imports) - - parser = PythonCSTParser( - tokens=detection_result.tokens, - config=detection_result.config, - pgen_grammar=grammar, - start_nonterminal=entrypoint, - ) - # The parser has an Any return type, we can at least refine it to CSTNode here. - result = parser.parse() - assert isinstance(result, CSTNode) - return result + return parse(source_str) def parse_module( diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index 0a058898..7697893d 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -10,7 +10,6 @@ from unittest.mock import patch import libcst as cst from libcst._nodes.base import CSTValidationError -from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider, UnitTest @@ -174,8 +173,6 @@ class ParseErrorsTest(UnitTest): parse_fn() # make sure str() doesn't blow up self.assertIn("Syntax Error", str(cm.exception)) - if not is_native(): - self.assertEqual(str(cm.exception), expected) def test_native_fallible_into_py(self) -> None: with patch("libcst._nodes.expression.Name._validate") as await_validate: diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 18dab870..9798b071 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -12,7 +12,6 @@ import tempfile from pathlib import Path from unittest import skipIf -from libcst._parser.entrypoints import is_native from libcst.codemod import CodemodTest from libcst.testing.utils import UnitTest @@ -37,16 +36,10 @@ class TestCodemodCLI(UnitTest): stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) - if not is_native(): - self.assertIn( - "ParserSyntaxError: Syntax Error @ 14:11.", - rlt.stderr.decode("utf-8"), - ) - else: - self.assertIn( - "error: cannot format -: Cannot parse for target version Python 3.6: 13:10: async with AsyncExitStack() as stack:", - rlt.stderr.decode("utf-8"), - ) + self.assertIn( + "error: cannot format -: Cannot parse for target version Python 3.6: 13:10: async with AsyncExitStack() as stack:", + rlt.stderr.decode("utf-8"), + ) def test_codemod_external(self) -> None: # Test running the NOOP command as an "external command" diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index fd23e993..a367de39 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -11,7 +11,6 @@ from unittest import mock import libcst as cst from libcst import ensure_type -from libcst._parser.entrypoints import is_native from libcst.metadata import MetadataWrapper from libcst.metadata.scope_provider import ( _gen_dotted_names, @@ -2029,8 +2028,6 @@ class ScopeProviderTest(UnitTest): ) def test_type_alias_scope(self) -> None: - if not is_native(): - self.skipTest("type aliases are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ type A = C @@ -2052,8 +2049,6 @@ class 
ScopeProviderTest(UnitTest): self.assertIsInstance(scopes[alias.value], AnnotationScope) def test_type_alias_param(self) -> None: - if not is_native(): - self.skipTest("type parameters are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ B = int @@ -2084,8 +2079,6 @@ class ScopeProviderTest(UnitTest): ) def test_type_alias_tuple_and_paramspec(self) -> None: - if not is_native(): - self.skipTest("type parameters are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ type A[*T] = T @@ -2113,8 +2106,6 @@ class ScopeProviderTest(UnitTest): self.assertEqual(t_refs[0].node, alias_paramspec.value) def test_class_type_params(self) -> None: - if not is_native(): - self.skipTest("type parameters are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ class W[T]: @@ -2149,8 +2140,6 @@ class ScopeProviderTest(UnitTest): self.assertEqual(t_refs_in_g[0].node, g.returns.annotation) def test_nested_class_type_params(self) -> None: - if not is_native(): - self.skipTest("type parameters are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ class Outer: @@ -2168,8 +2157,6 @@ class ScopeProviderTest(UnitTest): ) def test_annotation_refers_to_nested_class(self) -> None: - if not is_native(): - self.skipTest("type parameters are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ class Outer: @@ -2229,8 +2216,6 @@ class ScopeProviderTest(UnitTest): ) def test_body_isnt_subject_to_special_annotation_rule(self) -> None: - if not is_native(): - self.skipTest("type parameters are only supported in the native parser") m, scopes = get_scope_metadata_provider( """ class Outer: diff --git a/libcst/tests/__main__.py b/libcst/tests/__main__.py index 44e6bbe0..df28d1a6 100644 --- a/libcst/tests/__main__.py +++ b/libcst/tests/__main__.py @@ -5,11 +5,6 @@ from unittest import main -from libcst._parser.entrypoints import is_native - if __name__ == "__main__": - parser_type = "native" if is_native() else "pure" - print(f"running tests with {parser_type!r} parser") - main(module=None, verbosity=2) diff --git a/libcst/tests/test_roundtrip.py b/libcst/tests/test_roundtrip.py index d5da81f2..96d1e507 100644 --- a/libcst/tests/test_roundtrip.py +++ b/libcst/tests/test_roundtrip.py @@ -8,7 +8,7 @@ from pathlib import Path from unittest import TestCase from libcst import CSTTransformer, parse_module -from libcst._parser.entrypoints import is_native + fixtures: Path = Path(__file__).parent.parent.parent / "native/libcst/tests/fixtures" @@ -19,8 +19,6 @@ class NOOPTransformer(CSTTransformer): class RoundTripTests(TestCase): def _get_fixtures(self) -> list[Path]: - if not is_native(): - self.skipTest("pure python parser doesn't work with this") self.assertTrue(fixtures.exists(), f"{fixtures} should exist") files = list(fixtures.iterdir()) self.assertGreater(len(files), 0) diff --git a/pyproject.toml b/pyproject.toml index 3ebaaef1..394eca43 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,6 +66,7 @@ fail_under = 93 precision = 1 show_missing = true skip_covered = true +omit = ["*/_parser/*"] # temporary while I remove the parser [tool.uv] cache-keys = [ From b395d7ccf7727965e74047283d1cd294abd9b459 Mon Sep 17 00:00:00 2001 From: Thomas Serre <118730793+thomas-serre-sonarsource@users.noreply.github.com> Date: Mon, 4 Aug 2025 23:03:20 +0200 Subject: [PATCH 610/632] Fix noqa comments (#1379) --- libcst/_parser/parso/python/token.py | 2 +- libcst/_parser/types/token.py | 2 
+- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index 83731192..164262b9 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -27,7 +27,7 @@ try: ERROR_DEDENT: TokenType = native_token_type.ERROR_DEDENT except ImportError: - from libcst._parser.parso.python.py_token import ( # noqa F401 + from libcst._parser.parso.python.py_token import ( # noqa: F401 PythonTokenTypes, TokenType, ) diff --git a/libcst/_parser/types/token.py b/libcst/_parser/types/token.py index 32c85ccf..54d904ef 100644 --- a/libcst/_parser/types/token.py +++ b/libcst/_parser/types/token.py @@ -9,4 +9,4 @@ try: Token = tokenize.Token except ImportError: - from libcst._parser.types.py_token import Token # noqa F401 + from libcst._parser.types.py_token import Token # noqa: F401 From 7090a0db2bfa81489556d9ab7a2cdb16bd2ebaf9 Mon Sep 17 00:00:00 2001 From: Ken Kawamoto Date: Mon, 4 Aug 2025 14:27:13 -0700 Subject: [PATCH 611/632] fixes match statements to work with PositionProvider (#1389) * add failing test * fix issue * fixes an issue with PositionProvider not working with case statement * remove comments --------- Co-authored-by: steve --- libcst/_nodes/statement.py | 10 ++++ .../metadata/tests/test_position_provider.py | 47 +++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index 1aba38d3..cdc49edc 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -2886,6 +2886,9 @@ class MatchCase(CSTNode): state.add_token("if") self.whitespace_after_if._codegen(state) guard._codegen(state) + else: + self.whitespace_before_if._codegen(state) + self.whitespace_after_if._codegen(state) self.whitespace_before_colon._codegen(state) state.add_token(":") @@ -3473,6 +3476,13 @@ class MatchAs(MatchPattern): state.add_token(" ") elif isinstance(ws_after, BaseParenthesizableWhitespace): ws_after._codegen(state) + else: + ws_before = self.whitespace_before_as + if isinstance(ws_before, BaseParenthesizableWhitespace): + ws_before._codegen(state) + ws_after = self.whitespace_after_as + if isinstance(ws_after, BaseParenthesizableWhitespace): + ws_after._codegen(state) if name is None: state.add_token("_") else: diff --git a/libcst/metadata/tests/test_position_provider.py b/libcst/metadata/tests/test_position_provider.py index c479837e..14cecec7 100644 --- a/libcst/metadata/tests/test_position_provider.py +++ b/libcst/metadata/tests/test_position_provider.py @@ -83,6 +83,53 @@ class PositionProviderTest(UnitTest): wrapper = MetadataWrapper(parse_module("pass")) wrapper.visit_batched([ABatchable()]) + def test_match_statement_position_metadata(self) -> None: + test = self + + class MatchPositionVisitor(CSTVisitor): + METADATA_DEPENDENCIES = (PositionProvider,) + + def visit_Match(self, node: cst.Match) -> None: + test.assertEqual( + self.get_metadata(PositionProvider, node), + CodeRange((2, 0), (5, 16)), + ) + + def visit_MatchCase(self, node: cst.MatchCase) -> None: + if ( + isinstance(node.pattern, cst.MatchAs) + and node.pattern.name + and node.pattern.name.value == "b" + ): + test.assertEqual( + self.get_metadata(PositionProvider, node), + CodeRange((3, 4), (3, 16)), + ) + elif ( + isinstance(node.pattern, cst.MatchAs) + and node.pattern.name + and node.pattern.name.value == "c" + ): + test.assertEqual( + self.get_metadata(PositionProvider, node), + CodeRange((4, 4), (4, 16)), + ) + elif isinstance(node.pattern, 
cst.MatchAs) and not node.pattern.name: + test.assertEqual( + self.get_metadata(PositionProvider, node), + CodeRange((5, 4), (5, 16)), + ) + + code = """ +match status: + case b: pass + case c: pass + case _: pass +""" + + wrapper = MetadataWrapper(parse_module(code)) + wrapper.visit(MatchPositionVisitor()) + class PositionProvidingCodegenStateTest(UnitTest): def test_codegen_initial_position(self) -> None: From 441a7f0c816a00ffdf8522fe70421c2627dac74f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Aug 2025 18:58:52 -0700 Subject: [PATCH 612/632] build(deps): bump actions/download-artifact from 4 to 5 (#1390) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4 to 5. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/pypi_upload.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index fd69d5fc..de6cc5e1 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -25,7 +25,7 @@ jobs: persist-credentials: false - name: Download binary wheels id: download - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: pattern: wheels-* path: wheelhouse From 2048e6693c59372576a65fb06719a6e7dc5f0ea3 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Fri, 29 Aug 2025 15:37:00 -0400 Subject: [PATCH 613/632] bump version to 1.8.3 (#1397) --- CHANGELOG.md | 13 +++++++++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 18 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a8fb401..4b2bd514 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,16 @@ +# 1.8.3 - 2025-08-29 +## What's Changed +* removed: remove entry points to pure parser by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1375 +* fixed: fixes match statements to work with PositionProvider by @imsut in https://github.com/Instagram/LibCST/pull/1389 + + +## New Contributors +* @hunterhogan made their first contribution in https://github.com/Instagram/LibCST/pull/1378 +* @thomas-serre-sonarsource made their first contribution in https://github.com/Instagram/LibCST/pull/1379 +* @imsut made their first contribution in https://github.com/Instagram/LibCST/pull/1389 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.2...v1.8.3 + # 1.8.2 - 2025-06-13 # Fixed diff --git a/native/Cargo.lock b/native/Cargo.lock index ee5f0d57..7204b9f2 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.8.2" +version = "1.8.3" dependencies = [ "annotate-snippets", "criterion", @@ -304,7 +304,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.2" +version = "1.8.3" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 5b9f2f86..903a231b 100644 --- 
a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.8.2" +version = "1.8.3" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.1" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.2" } +libcst_derive = { path = "../libcst_derive", version = "1.8.3" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index f538e46e..9541f243 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.8.2" +version = "1.8.3" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From f746afd537910d99bd8226a1c1edbf5bc69630eb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Sep 2025 16:23:29 -0400 Subject: [PATCH 614/632] build(deps): bump rayon from 1.10.0 to 1.11.0 in /native (#1394) Bumps [rayon](https://github.com/rayon-rs/rayon) from 1.10.0 to 1.11.0. - [Changelog](https://github.com/rayon-rs/rayon/blob/main/RELEASES.md) - [Commits](https://github.com/rayon-rs/rayon/compare/rayon-core-v1.10.0...rayon-core-v1.11.0) --- updated-dependencies: - dependency-name: rayon dependency-version: 1.11.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 8 ++++---- native/libcst/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 7204b9f2..d978117e 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -514,9 +514,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -524,9 +524,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 903a231b..c13a6b19 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -47,7 +47,7 @@ libcst_derive = { path = "../libcst_derive", version = "1.8.3" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } difference = "2.0.0" -rayon = "1.10.0" +rayon = "1.11.0" itertools = "0.14.0" [[bench]] From e064729b4c04b834754c7be7f43a4dfb38972570 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Sep 2025 16:23:44 -0400 Subject: [PATCH 615/632] build(deps): bump pypa/cibuildwheel from 3.0.1 to 3.1.4 (#1395) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.0.1 to 3.1.4. 
- [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v3.0.1...v3.1.4) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-version: 3.1.4 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index bffe5fca..aa9c9686 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: github.event_name != 'release' run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v3.0.1 + uses: pypa/cibuildwheel@v3.1.4 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From d721a06c3fc3ab9210acd180cec5aaca856e1bc7 Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Wed, 3 Sep 2025 13:54:44 -0700 Subject: [PATCH 616/632] generate Attribute nodes when applying type annotations (#1396) * generate Attribute nodes when applying type annotations The old version generated an incorrect CST which happened to work as long as you didn't do further processing. * add a test --- .../visitors/_apply_type_annotations.py | 11 +++-- .../tests/test_apply_type_annotations.py | 48 +++++++++++++++++++ 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 9c826cc4..59347420 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -534,15 +534,20 @@ class _TypeCollectorDequalifier(cst.CSTTransformer): def __init__(self, type_collector: "TypeCollector") -> None: self.type_collector = type_collector - def leave_Name(self, original_node: cst.Name, updated_node: cst.Name) -> cst.Name: + def leave_Name( + self, original_node: cst.Name, updated_node: cst.Name + ) -> NameOrAttribute: qualified_name = _get_unique_qualified_name(self.type_collector, original_node) should_qualify = self.type_collector._handle_qualification_and_should_qualify( qualified_name, original_node ) self.type_collector.annotations.names.add(qualified_name) if should_qualify: - qualified_node = cst.parse_module(qualified_name) - return qualified_node # pyre-ignore[7] + parts = qualified_name.split(".") + qualified_node = cst.Name(parts[0]) + for p in parts[1:]: + qualified_node = cst.Attribute(qualified_node, cst.Name(p)) + return qualified_node else: return original_node diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index c7ea5c22..e7b25124 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -61,6 +61,28 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) self.assertCodemod(before, after, context_override=context) + def run_test_case_twice( + self, + stub: str, + before: str, + after: str, + ) -> None: + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + r1 = ApplyTypeAnnotationsVisitor(context).transform_module( + 
parse_module(textwrap.dedent(before.rstrip())) + ) + + context = CodemodContext() + ApplyTypeAnnotationsVisitor.store_stub_in_context( + context, parse_module(textwrap.dedent(stub.rstrip())) + ) + r2 = ApplyTypeAnnotationsVisitor(context).transform_module(r1) + assert r1.code == textwrap.dedent(after.rstrip()) + assert r2.code == textwrap.dedent(after.rstrip()) + @data_provider( { "simple": ( @@ -1965,3 +1987,29 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) def test_no_duplicate_annotations(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) + + @data_provider( + { + "qualifier_jank": ( + """ + from module.submodule import B + M: B + class Foo: ... + """, + """ + from module import B + M = B() + class Foo: pass + """, + """ + from module import B + import module.submodule + + M: module.submodule.B = B() + class Foo: pass + """, + ), + } + ) + def test_idempotent(self, stub: str, before: str, after: str) -> None: + self.run_test_case_twice(stub=stub, before=before, after=after) From f40d8351450ca9adfe5270bd86ec7951e83221f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 7 Sep 2025 21:29:40 -0700 Subject: [PATCH 617/632] build(deps): bump actions/setup-python from 5 to 6 (#1400) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 5 to 6. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/setup-python dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 2 +- .github/workflows/pypi_upload.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index aa9c9686..9f1c6d9f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,7 +28,7 @@ jobs: with: fetch-depth: 0 persist-credentials: false - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: "3.12" - uses: dtolnay/rust-toolchain@stable diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4b0fa767..d8d7d244 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -110,7 +110,7 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - name: test diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index de6cc5e1..d9d0bcb5 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -30,7 +30,7 @@ jobs: pattern: wheels-* path: wheelhouse merge-multiple: true - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: "3.10" - name: Install uv From 0c82bfa76166ed38409a97de8974f9aaf3e7e572 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 7 Sep 2025 21:30:06 -0700 Subject: [PATCH 618/632] build(deps): bump regex from 1.11.1 to 1.11.2 in /native (#1399) Bumps [regex](https://github.com/rust-lang/regex) from 1.11.1 to 1.11.2. 
- [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.11.1...1.11.2) --- updated-dependencies: - dependency-name: regex dependency-version: 1.11.2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index d978117e..7d39b6ec 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -534,9 +534,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.1" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" dependencies = [ "aho-corasick", "memchr", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index c13a6b19..b3abfa63 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -40,7 +40,7 @@ pyo3 = { version = "0.25.1", optional = true } thiserror = "2.0.12" peg = "0.8.5" annotate-snippets = "0.11.5" -regex = "1.11.1" +regex = "1.11.2" memchr = "2.7.4" libcst_derive = { path = "../libcst_derive", version = "1.8.3" } From 48668dfabb25e4bbdd35f81407498f5f5e47bf88 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Tue, 9 Sep 2025 11:16:20 -0400 Subject: [PATCH 619/632] Support parsing of t-strings #1374 (#1398) #1343 Adds support to parse t-strings Couple things of note: TemplatedString* is largely a copy of FormattedString* Since clients operate on libcst objects I consider this part of a public API - following the python grammar (where TStrings are distinct from FStrings) seems like a good way to avoid changes to the API in the future. Within the tokenizer we reuse the fstring machinery I consider this an implementation detail, fstrings and tstrings are (for now) identical, we can change this later without changes to the public api. Since 2 -> we have a new FTStringType enum We need to discriminate between f and t strings to know which token to return, a bit clumsy to use in my opinion - so looking for feedback here on how to improve this.
--- libcst/__init__.py | 8 + libcst/_nodes/expression.py | 247 ++++++++++++ libcst/_nodes/tests/test_template_strings.py | 183 +++++++++ libcst/_typed_visitor.py | 180 +++++++++ libcst/matchers/__init__.py | 377 ++++++++++++++++++ libcst/matchers/_return_types.py | 7 + native/libcst/src/lib.rs | 21 +- native/libcst/src/nodes/expression.rs | 139 +++++++ native/libcst/src/nodes/mod.rs | 8 +- native/libcst/src/parser/grammar.rs | 92 ++++- native/libcst/src/tokenizer/core/mod.rs | 119 ++++-- .../libcst/src/tokenizer/core/string_types.rs | 14 +- native/libcst/src/tokenizer/operators.rs | 2 +- native/libcst/src/tokenizer/tests.rs | 56 ++- native/libcst/tests/fixtures/super_strings.py | 8 + 15 files changed, 1406 insertions(+), 55 deletions(-) create mode 100644 libcst/_nodes/tests/test_template_strings.py diff --git a/libcst/__init__.py b/libcst/__init__.py index 4e82bc9e..0cd54d62 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -29,6 +29,7 @@ from libcst._nodes.expression import ( BaseSimpleComp, BaseSlice, BaseString, + BaseTemplatedStringContent, BinaryOperation, BooleanOperation, Call, @@ -75,6 +76,9 @@ from libcst._nodes.expression import ( StarredElement, Subscript, SubscriptElement, + TemplatedString, + TemplatedStringExpression, + TemplatedStringText, Tuple, UnaryOperation, Yield, @@ -268,6 +272,7 @@ __all__ = [ "BaseElement", "BaseExpression", "BaseFormattedStringContent", + "BaseTemplatedStringContent", "BaseList", "BaseNumber", "BaseSet", @@ -291,6 +296,9 @@ __all__ = [ "FormattedString", "FormattedStringExpression", "FormattedStringText", + "TemplatedString", + "TemplatedStringText", + "TemplatedStringExpression", "From", "GeneratorExp", "IfExp", diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index 00a1b4a7..eb95d9b3 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -958,6 +958,253 @@ class FormattedString(_BasePrefixedString): state.add_token(self.end) +class BaseTemplatedStringContent(CSTNode, ABC): + """ + The base type for :class:`TemplatedStringText` and + :class:`TemplatedStringExpression`. A :class:`TemplatedString` is composed of a + sequence of :class:`BaseTemplatedStringContent` parts. + """ + + __slots__ = () + + +@add_slots +@dataclass(frozen=True) +class TemplatedStringText(BaseTemplatedStringContent): + """ + Part of a :class:`TemplatedString` that is not inside curly braces (``{`` or ``}``). + For example, in:: + + f"ab{cd}ef" + + ``ab`` and ``ef`` are :class:`TemplatedStringText` nodes, but ``{cd}`` is a + :class:`TemplatedStringExpression`. + """ + + #: The raw string value, including any escape characters present in the source + #: code, not including any enclosing quotes. + value: str + + def _visit_and_replace_children( + self, visitor: CSTVisitorT + ) -> "TemplatedStringText": + return TemplatedStringText(value=self.value) + + def _codegen_impl(self, state: CodegenState) -> None: + state.add_token(self.value) + + +@add_slots +@dataclass(frozen=True) +class TemplatedStringExpression(BaseTemplatedStringContent): + """ + Part of a :class:`TemplatedString` that is inside curly braces (``{`` or ``}``), + including the surrounding curly braces. For example, in:: + + f"ab{cd}ef" + + ``{cd}`` is a :class:`TemplatedStringExpression`, but ``ab`` and ``ef`` are + :class:`TemplatedStringText` nodes. + + An t-string expression may contain ``conversion`` and ``format_spec`` suffixes that + control how the expression is converted to a string. 
+ """ + + #: The expression we will evaluate and render when generating the string. + expression: BaseExpression + + #: An optional conversion specifier, such as ``!s``, ``!r`` or ``!a``. + conversion: Optional[str] = None + + #: An optional format specifier following the `format specification mini-language + #: `_. + format_spec: Optional[Sequence[BaseTemplatedStringContent]] = None + + #: Whitespace after the opening curly brace (``{``), but before the ``expression``. + whitespace_before_expression: BaseParenthesizableWhitespace = ( + SimpleWhitespace.field("") + ) + + #: Whitespace after the ``expression``, but before the ``conversion``, + #: ``format_spec`` and the closing curly brace (``}``). Python does not + #: allow whitespace inside or after a ``conversion`` or ``format_spec``. + whitespace_after_expression: BaseParenthesizableWhitespace = SimpleWhitespace.field( + "" + ) + + #: Equal sign for Templated string expression uses self-documenting expressions, + #: such as ``f"{x=}"``. See the `Python 3.8 release notes + #: `_. + equal: Optional[AssignEqual] = None + + def _validate(self) -> None: + if self.conversion is not None and self.conversion not in ("s", "r", "a"): + raise CSTValidationError("Invalid t-string conversion.") + + def _visit_and_replace_children( + self, visitor: CSTVisitorT + ) -> "TemplatedStringExpression": + format_spec = self.format_spec + return TemplatedStringExpression( + whitespace_before_expression=visit_required( + self, + "whitespace_before_expression", + self.whitespace_before_expression, + visitor, + ), + expression=visit_required(self, "expression", self.expression, visitor), + equal=visit_optional(self, "equal", self.equal, visitor), + whitespace_after_expression=visit_required( + self, + "whitespace_after_expression", + self.whitespace_after_expression, + visitor, + ), + conversion=self.conversion, + format_spec=( + visit_sequence(self, "format_spec", format_spec, visitor) + if format_spec is not None + else None + ), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + state.add_token("{") + self.whitespace_before_expression._codegen(state) + self.expression._codegen(state) + equal = self.equal + if equal is not None: + equal._codegen(state) + self.whitespace_after_expression._codegen(state) + conversion = self.conversion + if conversion is not None: + state.add_token("!") + state.add_token(conversion) + format_spec = self.format_spec + if format_spec is not None: + state.add_token(":") + for spec in format_spec: + spec._codegen(state) + state.add_token("}") + + +@add_slots +@dataclass(frozen=True) +class TemplatedString(_BasePrefixedString): + """ + An "t-string". Template strings are a generalization of f-strings, + using a t in place of the f prefix. Instead of evaluating to str, + t-strings evaluate to a new type: Template + + T-Strings are defined in 'PEP 750' + + >>> import libcst as cst + >>> cst.parse_expression('t"ab{cd}ef"') + TemplatedString( + parts=[ + TemplatedStringText( + value='ab', + ), + TemplatedStringExpression( + expression=Name( + value='cd', + lpar=[], + rpar=[], + ), + conversion=None, + format_spec=None, + whitespace_before_expression=SimpleWhitespace( + value='', + ), + whitespace_after_expression=SimpleWhitespace( + value='', + ), + equal=None, + ), + TemplatedStringText( + value='ef', + ), + ], + start='t"', + end='"', + lpar=[], + rpar=[], + ) + >>> + """ + + #: A templated string is composed as a series of :class:`TemplatedStringText` and + #: :class:`TemplatedStringExpression` parts. 
+ parts: Sequence[BaseTemplatedStringContent] + + #: The string prefix and the leading quote, such as ``t"``, ``T'``, ``tr"``, or + #: ``t"""``. + start: str = 't"' + + #: The trailing quote. This must match the type of quote used in ``start``. + end: Literal['"', "'", '"""', "'''"] = '"' + + lpar: Sequence[LeftParen] = () + #: Sequence of parenthesis for precidence dictation. + rpar: Sequence[RightParen] = () + + def _validate(self) -> None: + super(_BasePrefixedString, self)._validate() + + # Validate any prefix + prefix = self.prefix + if prefix not in ("t", "tr", "rt"): + raise CSTValidationError("Invalid t-string prefix.") + + # Validate wrapping quotes + starttoken = self.start[len(prefix) :] + if starttoken != self.end: + raise CSTValidationError("t-string must have matching enclosing quotes.") + + # Validate valid wrapping quote usage + if starttoken not in ('"', "'", '"""', "'''"): + raise CSTValidationError("Invalid t-string enclosing quotes.") + + @property + def prefix(self) -> str: + """ + Returns the string's prefix, if any exists. The prefix can be ``t``, + ``tr``, or ``rt``. + """ + + prefix = "" + for c in self.start: + if c in ['"', "'"]: + break + prefix += c + return prefix.lower() + + @property + def quote(self) -> StringQuoteLiteral: + """ + Returns the quotation used to denote the string. Can be either ``'``, + ``"``, ``'''`` or ``\"\"\"``. + """ + + return self.end + + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TemplatedString": + return TemplatedString( + lpar=visit_sequence(self, "lpar", self.lpar, visitor), + start=self.start, + parts=visit_sequence(self, "parts", self.parts, visitor), + end=self.end, + rpar=visit_sequence(self, "rpar", self.rpar, visitor), + ) + + def _codegen_impl(self, state: CodegenState) -> None: + with self._parenthesize(state): + state.add_token(self.start) + for part in self.parts: + part._codegen(state) + state.add_token(self.end) + + @add_slots @dataclass(frozen=True) class ConcatenatedString(BaseString): diff --git a/libcst/_nodes/tests/test_template_strings.py b/libcst/_nodes/tests/test_template_strings.py new file mode 100644 index 00000000..6e4c308a --- /dev/null +++ b/libcst/_nodes/tests/test_template_strings.py @@ -0,0 +1,183 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import Callable, Optional + +import libcst as cst +from libcst import parse_expression +from libcst._nodes.tests.base import CSTNodeTest +from libcst.metadata import CodeRange +from libcst.testing.utils import data_provider + + +class TemplatedStringTest(CSTNodeTest): + @data_provider( + ( + # Simple t-string with only text + ( + cst.TemplatedString( + parts=(cst.TemplatedStringText("hello world"),), + ), + 't"hello world"', + True, + ), + # t-string with one expression + ( + cst.TemplatedString( + parts=( + cst.TemplatedStringText("hello "), + cst.TemplatedStringExpression( + expression=cst.Name("name"), + ), + ), + ), + 't"hello {name}"', + True, + ), + # t-string with multiple expressions + ( + cst.TemplatedString( + parts=( + cst.TemplatedStringText("a="), + cst.TemplatedStringExpression(expression=cst.Name("a")), + cst.TemplatedStringText(", b="), + cst.TemplatedStringExpression(expression=cst.Name("b")), + ), + ), + 't"a={a}, b={b}"', + True, + CodeRange((1, 0), (1, 15)), + ), + # t-string with nested expression + ( + cst.TemplatedString( + parts=( + cst.TemplatedStringText("sum="), + cst.TemplatedStringExpression( + expression=cst.BinaryOperation( + left=cst.Name("a"), + operator=cst.Add(), + right=cst.Name("b"), + ) + ), + ), + ), + 't"sum={a + b}"', + True, + ), + # t-string with spacing in expression + ( + cst.TemplatedString( + parts=( + cst.TemplatedStringText("x = "), + cst.TemplatedStringExpression( + whitespace_before_expression=cst.SimpleWhitespace(" "), + expression=cst.Name("x"), + whitespace_after_expression=cst.SimpleWhitespace(" "), + ), + ), + ), + 't"x = { x }"', + True, + ), + # t-string with escaped braces + ( + cst.TemplatedString( + parts=(cst.TemplatedStringText("{{foo}}"),), + ), + 't"{{foo}}"', + True, + ), + # t-string with only an expression + ( + cst.TemplatedString( + parts=( + cst.TemplatedStringExpression(expression=cst.Name("value")), + ), + ), + 't"{value}"', + True, + ), + # t-string with whitespace and newlines + ( + cst.TemplatedString( + parts=( + cst.TemplatedStringText("line1\\n"), + cst.TemplatedStringExpression(expression=cst.Name("x")), + cst.TemplatedStringText("\\nline2"), + ), + ), + 't"line1\\n{x}\\nline2"', + True, + ), + # t-string with parenthesis (not typical, but test node construction) + ( + cst.TemplatedString( + lpar=(cst.LeftParen(),), + parts=(cst.TemplatedStringText("foo"),), + rpar=(cst.RightParen(),), + ), + '(t"foo")', + True, + ), + # t-string with whitespace in delimiters + ( + cst.TemplatedString( + lpar=(cst.LeftParen(whitespace_after=cst.SimpleWhitespace(" ")),), + parts=(cst.TemplatedStringText("foo"),), + rpar=(cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")),), + ), + '( t"foo" )', + True, + ), + # Test TemplatedStringText and TemplatedStringExpression individually + ( + cst.TemplatedStringText("abc"), + "abc", + False, + CodeRange((1, 0), (1, 3)), + ), + ( + cst.TemplatedStringExpression(expression=cst.Name("foo")), + "{foo}", + False, + CodeRange((1, 0), (1, 5)), + ), + ) + ) + def test_valid( + self, + node: cst.CSTNode, + code: str, + check_parsing: bool, + position: Optional[CodeRange] = None, + ) -> None: + if check_parsing: + self.validate_node(node, code, parse_expression, expected_position=position) + else: + self.validate_node(node, code, expected_position=position) + + @data_provider( + ( + ( + lambda: cst.TemplatedString( + parts=(cst.TemplatedStringText("foo"),), + lpar=(cst.LeftParen(),), + ), + "left paren without right paren", + ), + ( + lambda: cst.TemplatedString( + 
parts=(cst.TemplatedStringText("foo"),), + rpar=(cst.RightParen(),), + ), + "right paren without left paren", + ), + ) + ) + def test_invalid( + self, get_node: Callable[[], cst.CSTNode], expected_re: str + ) -> None: + self.assert_invalid(get_node, expected_re) diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index e6322353..8816f619 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -25,6 +25,7 @@ if TYPE_CHECKING: BaseExpression, BaseFormattedStringContent, BaseSlice, + BaseTemplatedStringContent, BinaryOperation, BooleanOperation, Call, @@ -71,6 +72,9 @@ if TYPE_CHECKING: StarredElement, Subscript, SubscriptElement, + TemplatedString, + TemplatedStringExpression, + TemplatedStringText, Tuple, UnaryOperation, Yield, @@ -5182,6 +5186,140 @@ class CSTTypedBaseFunctions: def leave_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: pass + @mark_no_op + def visit_TemplatedString(self, node: "TemplatedString") -> Optional[bool]: + pass + + @mark_no_op + def visit_TemplatedString_parts(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def leave_TemplatedString_parts(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def visit_TemplatedString_start(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def leave_TemplatedString_start(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def visit_TemplatedString_end(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def leave_TemplatedString_end(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def visit_TemplatedString_lpar(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def leave_TemplatedString_lpar(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def visit_TemplatedString_rpar(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def leave_TemplatedString_rpar(self, node: "TemplatedString") -> None: + pass + + @mark_no_op + def visit_TemplatedStringExpression( + self, node: "TemplatedStringExpression" + ) -> Optional[bool]: + pass + + @mark_no_op + def visit_TemplatedStringExpression_expression( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_TemplatedStringExpression_expression( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_TemplatedStringExpression_conversion( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_TemplatedStringExpression_conversion( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_TemplatedStringExpression_format_spec( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_TemplatedStringExpression_format_spec( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_TemplatedStringExpression_whitespace_before_expression( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_TemplatedStringExpression_whitespace_before_expression( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_TemplatedStringExpression_whitespace_after_expression( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_TemplatedStringExpression_whitespace_after_expression( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_TemplatedStringExpression_equal( + self, node: "TemplatedStringExpression" + ) -> 
None: + pass + + @mark_no_op + def leave_TemplatedStringExpression_equal( + self, node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def visit_TemplatedStringText(self, node: "TemplatedStringText") -> Optional[bool]: + pass + + @mark_no_op + def visit_TemplatedStringText_value(self, node: "TemplatedStringText") -> None: + pass + + @mark_no_op + def leave_TemplatedStringText_value(self, node: "TemplatedStringText") -> None: + pass + @mark_no_op def visit_TrailingWhitespace(self, node: "TrailingWhitespace") -> Optional[bool]: pass @@ -6385,6 +6523,20 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_SubtractAssign(self, original_node: "SubtractAssign") -> None: pass + @mark_no_op + def leave_TemplatedString(self, original_node: "TemplatedString") -> None: + pass + + @mark_no_op + def leave_TemplatedStringExpression( + self, original_node: "TemplatedStringExpression" + ) -> None: + pass + + @mark_no_op + def leave_TemplatedStringText(self, original_node: "TemplatedStringText") -> None: + pass + @mark_no_op def leave_TrailingWhitespace(self, original_node: "TrailingWhitespace") -> None: pass @@ -7402,6 +7554,34 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> "BaseAugOp": return updated_node + @mark_no_op + def leave_TemplatedString( + self, original_node: "TemplatedString", updated_node: "TemplatedString" + ) -> "BaseExpression": + return updated_node + + @mark_no_op + def leave_TemplatedStringExpression( + self, + original_node: "TemplatedStringExpression", + updated_node: "TemplatedStringExpression", + ) -> Union[ + "BaseTemplatedStringContent", + FlattenSentinel["BaseTemplatedStringContent"], + RemovalSentinel, + ]: + return updated_node + + @mark_no_op + def leave_TemplatedStringText( + self, original_node: "TemplatedStringText", updated_node: "TemplatedStringText" + ) -> Union[ + "BaseTemplatedStringContent", + FlattenSentinel["BaseTemplatedStringContent"], + RemovalSentinel, + ]: + return updated_node + @mark_no_op def leave_TrailingWhitespace( self, original_node: "TrailingWhitespace", updated_node: "TrailingWhitespace" diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 57e1b4c9..2857fee1 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -142,6 +142,10 @@ class BaseSuite(_NodeABC): pass +class BaseTemplatedStringContent(_NodeABC): + pass + + class BaseUnaryOp(_NodeABC): pass @@ -14283,6 +14287,375 @@ class SubtractAssign(BaseAugOp, BaseMatcherNode): ] = DoNotCare() +BaseTemplatedStringContentMatchType = Union[ + "BaseTemplatedStringContent", + MetadataMatchType, + MatchIfTrue[cst.BaseTemplatedStringContent], +] + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TemplatedString(BaseExpression, BaseString, BaseMatcherNode): + parts: Union[ + Sequence[ + Union[ + BaseTemplatedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + AtLeastN[ + Union[ + BaseTemplatedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseTemplatedStringContentMatchType, + DoNotCareSentinel, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]], + OneOf[ + Union[ + Sequence[ + Union[ + BaseTemplatedStringContentMatchType, + 
OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + AtLeastN[ + Union[ + BaseTemplatedStringContentMatchType, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseTemplatedStringContentMatchType, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + BaseTemplatedStringContentMatchType, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + AtLeastN[ + Union[ + BaseTemplatedStringContentMatchType, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + ] + ], + AtMostN[ + Union[ + BaseTemplatedStringContentMatchType, + OneOf[BaseTemplatedStringContentMatchType], + AllOf[BaseTemplatedStringContentMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]], + ] + ], + ] = DoNotCare() + start: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + end: Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + DoNotCareSentinel, + OneOf[ + Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + ] + ], + AllOf[ + Union[ + Literal['"', "'", '"""', "'''"], + MetadataMatchType, + MatchIfTrue[Literal['"', "'", '"""', "'''"]], + ] + ], + ] = DoNotCare() + lpar: Union[ + Sequence[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + DoNotCareSentinel, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.LeftParen]], + OneOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + AtLeastN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + AtMostN[ + Union[ + LeftParenMatchType, + OneOf[LeftParenMatchType], + AllOf[LeftParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.LeftParen]], + ] + ], + ] = DoNotCare() + rpar: Union[ + Sequence[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + DoNotCareSentinel, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + DoNotCareSentinel, + MatchIfTrue[Sequence[cst.RightParen]], + OneOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + 
AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + AllOf[ + Union[ + Sequence[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + AtLeastN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + AtMostN[ + Union[ + RightParenMatchType, + OneOf[RightParenMatchType], + AllOf[RightParenMatchType], + ] + ], + ] + ], + MatchIfTrue[Sequence[cst.RightParen]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TemplatedStringExpression(BaseTemplatedStringContent, BaseMatcherNode): + expression: Union[ + BaseExpressionMatchType, + DoNotCareSentinel, + OneOf[BaseExpressionMatchType], + AllOf[BaseExpressionMatchType], + ] = DoNotCare() + conversion: Union[ + Optional[str], + MetadataMatchType, + MatchIfTrue[Optional[str]], + DoNotCareSentinel, + OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], + ] = DoNotCare() + format_spec: Union[ + Optional[Sequence["BaseTemplatedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional[Sequence["BaseTemplatedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]], + ] + ], + AllOf[ + Union[ + Optional[Sequence["BaseTemplatedStringContent"]], + MetadataMatchType, + MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]], + ] + ], + ] = DoNotCare() + whitespace_before_expression: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + whitespace_after_expression: Union[ + BaseParenthesizableWhitespaceMatchType, + DoNotCareSentinel, + OneOf[BaseParenthesizableWhitespaceMatchType], + AllOf[BaseParenthesizableWhitespaceMatchType], + ] = DoNotCare() + equal: Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + DoNotCareSentinel, + OneOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + ] + ], + AllOf[ + Union[ + Optional["AssignEqual"], + MetadataMatchType, + MatchIfTrue[Optional[cst.AssignEqual]], + ] + ], + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + +@dataclass(frozen=True, eq=False, unsafe_hash=False) +class TemplatedStringText(BaseTemplatedStringContent, BaseMatcherNode): + value: Union[ + strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] + ] = DoNotCare() + metadata: Union[ + MetadataMatchType, + DoNotCareSentinel, + OneOf[MetadataMatchType], + AllOf[MetadataMatchType], + ] = DoNotCare() + + @dataclass(frozen=True, eq=False, unsafe_hash=False) class TrailingWhitespace(BaseMatcherNode): whitespace: Union[ @@ -16122,6 +16495,7 @@ __all__ = [ "BaseStatement", "BaseString", "BaseSuite", + "BaseTemplatedStringContent", "BaseUnaryOp", "BinaryOperation", "BitAnd", @@ -16274,6 +16648,9 @@ __all__ = [ "SubscriptElement", "Subtract", 
"SubtractAssign", + "TemplatedString", + "TemplatedStringExpression", + "TemplatedStringText", "TrailingWhitespace", "Try", "TryStar", diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index d49e009c..2f050088 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -20,6 +20,7 @@ from libcst._nodes.expression import ( BaseExpression, BaseFormattedStringContent, BaseSlice, + BaseTemplatedStringContent, BinaryOperation, BooleanOperation, Call, @@ -66,6 +67,9 @@ from libcst._nodes.expression import ( StarredElement, Subscript, SubscriptElement, + TemplatedString, + TemplatedStringExpression, + TemplatedStringText, Tuple, UnaryOperation, Yield, @@ -358,6 +362,9 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { SubscriptElement: Union[SubscriptElement, RemovalSentinel], Subtract: BaseBinaryOp, SubtractAssign: BaseAugOp, + TemplatedString: BaseExpression, + TemplatedStringExpression: Union[BaseTemplatedStringContent, RemovalSentinel], + TemplatedStringText: Union[BaseTemplatedStringContent, RemovalSentinel], TrailingWhitespace: TrailingWhitespace, Try: Union[BaseStatement, RemovalSentinel], TryStar: Union[BaseStatement, RemovalSentinel], diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs index 93e6bd05..113d5d02 100644 --- a/native/libcst/src/lib.rs +++ b/native/libcst/src/lib.rs @@ -25,7 +25,7 @@ pub fn tokenize(text: &str) -> Result> { text, &TokConfig { async_hacks: false, - split_fstring: true, + split_ftstring: true, }, ); @@ -191,4 +191,23 @@ mod test { assert_eq!(11, bol_offset("hello\nhello", 3)); assert_eq!(12, bol_offset("hello\nhello\nhello", 3)); } + #[test] + fn test_tstring_basic() { + assert!( + parse_module("t'hello'", None).is_ok(), + "Failed to parse t'hello'" + ); + assert!( + parse_module("t'{hello}'", None).is_ok(), + "Failed to parse t'{{hello}}'" + ); + assert!( + parse_module("t'{hello:r}'", None).is_ok(), + "Failed to parse t'{{hello:r}}'" + ); + assert!( + parse_module("f'line1\\n{hello:r}\\nline2'", None).is_ok(), + "Failed to parse t'line1\\n{{hello:r}}\\nline2'" + ); + } } diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index cbd1e361..961eee9f 100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -474,6 +474,7 @@ pub enum Expression<'a> { SimpleString(Box>), ConcatenatedString(Box>), FormattedString(Box>), + TemplatedString(Box>), NamedExpr(Box>), } @@ -2249,6 +2250,7 @@ pub enum String<'a> { Simple(SimpleString<'a>), Concatenated(ConcatenatedString<'a>), Formatted(FormattedString<'a>), + Templated(TemplatedString<'a>), } impl<'r, 'a> std::convert::From> for DeflatedExpression<'r, 'a> { @@ -2257,6 +2259,7 @@ impl<'r, 'a> std::convert::From> for DeflatedExpression<' DeflatedString::Simple(s) => Self::SimpleString(Box::new(s)), DeflatedString::Concatenated(s) => Self::ConcatenatedString(Box::new(s)), DeflatedString::Formatted(s) => Self::FormattedString(Box::new(s)), + DeflatedString::Templated(s) => Self::TemplatedString(Box::new(s)), } } } @@ -2334,6 +2337,142 @@ impl<'a> Codegen<'a> for SimpleString<'a> { } } +#[cst_node] +pub struct TemplatedStringText<'a> { + pub value: &'a str, +} + +impl<'r, 'a> Inflate<'a> for DeflatedTemplatedStringText<'r, 'a> { + type Inflated = TemplatedStringText<'a>; + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(Self::Inflated { value: self.value }) + } +} + +impl<'a> Codegen<'a> for TemplatedStringText<'a> { + fn codegen(&self, state: &mut 
CodegenState<'a>) { + state.add_token(self.value); + } +} + +pub(crate) fn make_tstringtext<'r, 'a>(value: &'a str) -> DeflatedTemplatedStringText<'r, 'a> { + DeflatedTemplatedStringText { + value, + _phantom: Default::default(), + } +} + +#[cst_node] +pub struct TemplatedStringExpression<'a> { + // This represents the part of a t-string that is insde the brackets '{' and '}'. + pub expression: Expression<'a>, + pub conversion: Option<&'a str>, + pub format_spec: Option>>, + pub whitespace_before_expression: ParenthesizableWhitespace<'a>, + pub whitespace_after_expression: ParenthesizableWhitespace<'a>, + pub equal: Option>, + + pub(crate) lbrace_tok: TokenRef<'a>, + // This is None if there's an equal sign, otherwise it's the first token of + // (conversion, format spec, right brace) in that order + pub(crate) after_expr_tok: Option>, +} + +impl<'r, 'a> Inflate<'a> for DeflatedTemplatedStringExpression<'r, 'a> { + type Inflated = TemplatedStringExpression<'a>; + fn inflate(mut self, config: &Config<'a>) -> Result { + let whitespace_before_expression = parse_parenthesizable_whitespace( + config, + &mut (*self.lbrace_tok).whitespace_after.borrow_mut(), + )?; + let expression = self.expression.inflate(config)?; + let equal = self.equal.inflate(config)?; + let whitespace_after_expression = if let Some(after_expr_tok) = self.after_expr_tok.as_mut() + { + parse_parenthesizable_whitespace( + config, + &mut after_expr_tok.whitespace_before.borrow_mut(), + )? + } else { + Default::default() + }; + let format_spec = self.format_spec.inflate(config)?; + Ok(Self::Inflated { + expression, + conversion: self.conversion, + format_spec, + whitespace_before_expression, + whitespace_after_expression, + equal, + }) + } +} + +impl<'a> Codegen<'a> for TemplatedStringExpression<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + state.add_token("{"); + self.whitespace_before_expression.codegen(state); + self.expression.codegen(state); + if let Some(eq) = &self.equal { + eq.codegen(state); + } + self.whitespace_after_expression.codegen(state); + if let Some(conv) = &self.conversion { + state.add_token("!"); + state.add_token(conv); + } + if let Some(specs) = &self.format_spec { + state.add_token(":"); + for spec in specs { + spec.codegen(state); + } + } + state.add_token("}"); + } +} + +#[cst_node(ParenthesizedNode)] +pub struct TemplatedString<'a> { + pub parts: Vec>, + pub start: &'a str, + pub end: &'a str, + pub lpar: Vec>, + pub rpar: Vec>, +} + +impl<'r, 'a> Inflate<'a> for DeflatedTemplatedString<'r, 'a> { + type Inflated = TemplatedString<'a>; + fn inflate(self, config: &Config<'a>) -> Result { + let lpar = self.lpar.inflate(config)?; + let parts = self.parts.inflate(config)?; + let rpar = self.rpar.inflate(config)?; + Ok(Self::Inflated { + parts, + start: self.start, + end: self.end, + lpar, + rpar, + }) + } +} + +impl<'a> Codegen<'a> for TemplatedString<'a> { + fn codegen(&self, state: &mut CodegenState<'a>) { + self.parenthesize(state, |state| { + state.add_token(self.start); + for part in &self.parts { + part.codegen(state); + } + state.add_token(self.end); + }) + } +} + +#[cst_node(Codegen, Inflate)] +pub enum TemplatedStringContent<'a> { + Text(TemplatedStringText<'a>), + Expression(Box>), +} #[cst_node] pub struct FormattedStringText<'a> { pub value: &'a str, diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index ce02c86f..e044db94 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -31,7 +31,8 @@ pub use expression::{ Integer, 
Lambda, LeftCurlyBrace, LeftParen, LeftSquareBracket, List, ListComp, Name, NameOrAttribute, NamedExpr, Param, ParamSlash, ParamStar, Parameters, RightCurlyBrace, RightParen, RightSquareBracket, Set, SetComp, SimpleString, Slice, StarArg, StarredDictElement, - StarredElement, String, Subscript, SubscriptElement, Tuple, UnaryOperation, Yield, YieldValue, + StarredElement, String, Subscript, SubscriptElement, TemplatedString, TemplatedStringContent, + TemplatedStringExpression, Tuple, UnaryOperation, Yield, YieldValue, }; pub(crate) mod op; @@ -78,7 +79,10 @@ pub(crate) mod deflated { DeflatedSlice as Slice, DeflatedStarArg as StarArg, DeflatedStarredDictElement as StarredDictElement, DeflatedStarredElement as StarredElement, DeflatedString as String, DeflatedSubscript as Subscript, - DeflatedSubscriptElement as SubscriptElement, DeflatedTuple as Tuple, + DeflatedSubscriptElement as SubscriptElement, DeflatedTemplatedString as TemplatedString, + DeflatedTemplatedStringContent as TemplatedStringContent, + DeflatedTemplatedStringExpression as TemplatedStringExpression, + DeflatedTemplatedStringText as TemplatedStringText, DeflatedTuple as Tuple, DeflatedUnaryOperation as UnaryOperation, DeflatedYield as Yield, DeflatedYieldValue as YieldValue, }; diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 30a4b9bb..76920d66 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -8,6 +8,7 @@ use std::rc::Rc; use crate::expression::make_async; use crate::nodes::deflated::*; use crate::nodes::expression::make_fstringtext; +use crate::nodes::expression::make_tstringtext; use crate::nodes::op::make_importstar; use crate::nodes::traits::ParenthesizedDeflatedNode; use crate::parser::ParserError; @@ -17,7 +18,8 @@ use peg::str::LineCol; use peg::{parser, Parse, ParseElem, RuleResult}; use TokType::{ Async, Await as AWAIT, Dedent, EndMarker, FStringEnd, FStringStart, FStringString, Indent, - Name as NameTok, Newline as NL, Number, String as STRING, + Name as NameTok, Newline as NL, Number, String as STRING, TStringEnd, TStringStart, + TStringString, }; pub type Result<'a, T> = std::result::Result>; @@ -1043,7 +1045,7 @@ parser! { / n:lit("True") { Expression::Name(Box::new(make_name(n))) } / n:lit("False") { Expression::Name(Box::new(make_name(n))) } / n:lit("None") { Expression::Name(Box::new(make_name(n))) } - / &(tok(STRING, "") / tok(FStringStart, "")) s:strings() {s.into()} + / &(tok(STRING, "") / tok(FStringStart, "") / tok(TStringStart, "")) s:strings() {s.into()} / n:tok(Number, "NUMBER") { make_number(n) } / &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(Box::new(g))})) {e} / &lit("[") e:(list() / listcomp()) {e} @@ -1151,7 +1153,7 @@ parser! { rule strings() -> String<'input, 'a> = s:(str:tok(STRING, "STRING") t:&_ {(make_string(str), t)} - / str:fstring() t:&_ {(String::Formatted(str), t)})+ {? + / str:fstring() t:&_ {(String::Formatted(str), t)} / str:tstring() t:&_ {(String::Templated(str), t)})+ {? make_strings(s) } @@ -1463,6 +1465,34 @@ parser! 
{ rule _f_spec() -> Vec> = (_f_string() / _f_replacement())* + // T-strings + + rule tstring() -> TemplatedString<'input, 'a> + = start:tok(TStringStart, "t\"") + parts:(_t_string() / _t_replacement())* + end:tok(TStringEnd, "\"") { + make_tstring(start.string, parts, end.string) + } + + rule _t_string() -> TemplatedStringContent<'input, 'a> + = t:tok(TStringString, "t-string contents") { + TemplatedStringContent::Text(make_tstringtext(t.string)) + } + + + rule _t_replacement() -> TemplatedStringContent<'input, 'a> + = lb:lit("{") e:annotated_rhs() eq:lit("=")? + conv:(t:lit("!") c:_f_conversion() {(t,c)})? + spec:(t:lit(":") s:_t_spec() {(t,s)})? + rb:lit("}") { + TemplatedStringContent::Expression(Box::new( + make_tstring_expression(lb, e, eq, conv, spec, rb) + )) + } + + rule _t_spec() -> Vec> + = (_t_string() / _t_replacement())* + // CST helpers rule comma() -> Comma<'input, 'a> @@ -2877,6 +2907,48 @@ fn make_strings<'input, 'a>( })) } +fn make_tstring_expression<'input, 'a>( + lbrace_tok: TokenRef<'input, 'a>, + expression: Expression<'input, 'a>, + eq: Option>, + conversion_pair: Option<(TokenRef<'input, 'a>, &'a str)>, + format_pair: Option<( + TokenRef<'input, 'a>, + Vec>, + )>, + rbrace_tok: TokenRef<'input, 'a>, +) -> TemplatedStringExpression<'input, 'a> { + let equal: Option> = eq.map(make_assign_equal); + let (conversion_tok, conversion) = if let Some((t, c)) = conversion_pair { + (Some(t), Some(c)) + } else { + (None, None) + }; + let (format_tok, format_spec) = if let Some((t, f)) = format_pair { + (Some(t), Some(f)) + } else { + (None, None) + }; + let after_expr_tok = if equal.is_some() { + None + } else if let Some(tok) = conversion_tok { + Some(tok) + } else if let Some(tok) = format_tok { + Some(tok) + } else { + Some(rbrace_tok) + }; + + TemplatedStringExpression { + expression, + conversion, + format_spec, + equal, + lbrace_tok, + after_expr_tok, + } +} + fn make_fstring_expression<'input, 'a>( lbrace_tok: TokenRef<'input, 'a>, expression: Expression<'input, 'a>, @@ -2933,6 +3005,20 @@ fn make_fstring<'input, 'a>( } } +fn make_tstring<'input, 'a>( + start: &'a str, + parts: Vec>, + end: &'a str, +) -> TemplatedString<'input, 'a> { + TemplatedString { + start, + parts, + end, + lpar: Default::default(), + rpar: Default::default(), + } +} + fn make_finally<'input, 'a>( finally_tok: TokenRef<'input, 'a>, colon_tok: TokenRef<'input, 'a>, diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 477bd868..120b6163 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -66,8 +66,9 @@ use std::fmt::Debug; use std::fmt::Formatter; use std::rc::Rc; +use crate::tokenizer::core::string_types::FTStringType; use crate::tokenizer::{ - core::string_types::{FStringNode, StringQuoteChar, StringQuoteSize}, + core::string_types::{FTStringNode, StringQuoteChar, StringQuoteSize}, operators::OPERATOR_RE, text_position::{TextPosition, TextPositionSnapshot}, whitespace_parser::State as WhitespaceState, @@ -86,7 +87,7 @@ thread_local! 
{ static SPACE_TAB_FORMFEED_RE: Regex = Regex::new(r"\A[ \f\t]+").expect("regex"); static ANY_NON_NEWLINE_RE: Regex = Regex::new(r"\A[^\r\n]+").expect("regex"); static STRING_PREFIX_RE: Regex = - Regex::new(r"\A(?i)(u|[bf]r|r[bf]|r|b|f)").expect("regex"); + Regex::new(r"\A(?i)(u|[bf]r|r[bft]|r|b|f|t)").expect("regex"); static POTENTIAL_IDENTIFIER_TAIL_RE: Regex = Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex"); static DECIMAL_DOT_DIGIT_RE: Regex = Regex::new(r"\A\.[0-9]").expect("regex"); @@ -118,6 +119,9 @@ pub enum TokType { FStringStart, FStringString, FStringEnd, + TStringStart, + TStringString, + TStringEnd, EndMarker, } @@ -222,8 +226,8 @@ pub struct TokState<'t> { /// /// Supporting this at the tokenizer-level is pretty nasty and adds a lot of complexity. /// Eventually, we should probably support this at the parser-level instead. - split_fstring: bool, - fstring_stack: Vec, + split_ftstring: bool, + ftstring_stack: Vec, missing_nl_before_eof: bool, } @@ -233,7 +237,7 @@ pub struct TokConfig { /// identifiers, depending on if they're being used in the context of an async function. This /// breaks async comprehensions outside of async functions. pub async_hacks: bool, - pub split_fstring: bool, + pub split_ftstring: bool, // Not currently supported: // type_comments: bool, } @@ -272,8 +276,8 @@ impl<'t> TokState<'t> { async_def: false, async_def_indent: 0, async_def_nl: false, - split_fstring: config.split_fstring, - fstring_stack: Vec::new(), + split_ftstring: config.split_ftstring, + ftstring_stack: Vec::new(), missing_nl_before_eof: text.is_empty() || text.as_bytes()[text.len() - 1] != b'\n', } } @@ -285,18 +289,18 @@ impl<'t> TokState<'t> { /// Implementation of `next()`, wrapped by next() to allow for easier error handling. Roughly /// equivalent to `tok_get` in the C source code. fn next_inner(&mut self) -> Result> { - if self.split_fstring { - if let Some(tos) = self.fstring_stack.last() { + if self.split_ftstring { + if let Some(tos) = self.ftstring_stack.last() { if !tos.is_in_expr() { self.start_pos = (&self.text_pos).into(); let is_in_format_spec = tos.is_in_format_spec(); let is_raw_string = tos.is_raw_string; if let Some(tok) = - self.maybe_consume_fstring_string(is_in_format_spec, is_raw_string)? + self.maybe_consume_ftstring_string(is_in_format_spec, is_raw_string)? 
{ return Ok(tok); } - if let Some(tok) = self.maybe_consume_fstring_end() { + if let Some(tok) = self.maybe_consume_ftstring_end() { return Ok(tok); } } @@ -362,8 +366,11 @@ impl<'t> TokState<'t> { Some('\n') => { self.text_pos.next(); self.at_bol = true; - if self.split_fstring - && self.fstring_stack.last().map(|node| node.allow_multiline()) + if self.split_ftstring + && self + .ftstring_stack + .last() + .map(|node| node.allow_multiline()) == Some(false) { Err(TokError::UnterminatedString) @@ -420,7 +427,7 @@ impl<'t> TokState<'t> { Some(ch @ '(') | Some(ch @ '[') | Some(ch @ '{') => { self.text_pos.next(); - if let Some(tos) = self.fstring_stack.last_mut() { + if let Some(tos) = self.ftstring_stack.last_mut() { tos.open_parentheses(); } self.paren_stack.push((ch, self.text_pos.line_number())); @@ -429,7 +436,7 @@ impl<'t> TokState<'t> { Some(closing @ ')') | Some(closing @ ']') | Some(closing @ '}') => { self.text_pos.next(); - if let Some(tos) = self.fstring_stack.last_mut() { + if let Some(tos) = self.ftstring_stack.last_mut() { tos.close_parentheses(); } if let Some((opening, line_number)) = self.paren_stack.pop() { @@ -454,7 +461,7 @@ impl<'t> TokState<'t> { Some(':') if self - .fstring_stack + .ftstring_stack .last() .map(|tos| tos.parentheses_count - tos.format_spec_count == 1) .unwrap_or(false) => @@ -465,9 +472,9 @@ impl<'t> TokState<'t> { // // >>> f'{x:=10}' # Valid, passes '=10' to formatter let tos = self - .fstring_stack + .ftstring_stack .last_mut() - .expect("fstring_stack is not empty"); + .expect("ftstring_stack is not empty"); tos.format_spec_count += 1; self.text_pos.next(); Ok(TokType::Op) @@ -624,20 +631,27 @@ impl<'t> TokState<'t> { } fn consume_identifier_or_prefixed_string(&mut self) -> Result> { - // Process the various legal combinations of b"", r"", u"", and f"". + // Process the various legal combinations of b"", r"", u"",f"", and t"". if STRING_PREFIX_RE.with(|r| self.text_pos.consume(r)) { if let Some('"') | Some('\'') = self.text_pos.peek() { // We found a string, not an identifier. Bail! - if self.split_fstring - && self + if self.split_ftstring { + let res = match self .text_pos .slice_from_start_pos(&self.start_pos) - .contains(&['f', 'F'][..]) - { - return self.consume_fstring_start(); - } else { - return self.consume_string(); + .chars() + .find(|c| matches!(c, 'f' | 'F' | 't' | 'T')) + { + Some('f' | 'F') => Some(FTStringType::FString), + Some('t' | 'T') => Some(FTStringType::TString), + _ => None, + }; + if let Some(str_type) = res { + // Consume the prefix and return the start token + return self.consume_prefixed_string_start(str_type); + } } + return self.consume_string(); } } else { // the next character must be a potential identifier start, aka `[a-zA-Z_]|[^\x00-\x7f]` @@ -880,24 +894,43 @@ impl<'t> TokState<'t> { Ok(TokType::String) } - fn consume_fstring_start(&mut self) -> Result> { + fn consume_prefixed_string_start( + &mut self, + str_type: FTStringType, + ) -> Result> { + // Consumes everything after the (f|t) but before the actual string. 
let (quote_char, quote_size) = self.consume_open_quote(); let is_raw_string = self .text_pos .slice_from_start_pos(&self.start_pos) .contains(&['r', 'R'][..]); - self.fstring_stack - .push(FStringNode::new(quote_char, quote_size, is_raw_string)); - Ok(TokType::FStringStart) + self.ftstring_stack.push(FTStringNode::new( + quote_char, + quote_size, + is_raw_string, + str_type.clone(), + )); + + match str_type { + FTStringType::FString => Ok(TokType::FStringStart), + FTStringType::TString => Ok(TokType::TStringStart), + } } - fn maybe_consume_fstring_string( + fn maybe_consume_ftstring_string( &mut self, is_in_format_spec: bool, is_raw_string: bool, ) -> Result, TokError<'t>> { - let allow_multiline = - self.fstring_stack.last().map(|node| node.allow_multiline()) == Some(true); + let allow_multiline = self + .ftstring_stack + .last() + .map(|node| node.allow_multiline()) + == Some(true); + let str_type = self + .ftstring_stack + .last() + .map(|node| node.string_type.clone()); let mut in_named_unicode: bool = false; let mut ok_result = Ok(None); // value to return if we reach the end and don't error out 'outer: loop { @@ -910,7 +943,7 @@ impl<'t> TokState<'t> { } (ch @ Some('\''), _) | (ch @ Some('"'), _) => { // see if this actually terminates the most recent fstring - if let Some(node) = self.fstring_stack.last() { + if let Some(node) = self.ftstring_stack.last() { if ch == Some(node.quote_char.into()) { match node.quote_size { StringQuoteSize::Single => { @@ -999,22 +1032,30 @@ impl<'t> TokState<'t> { self.text_pos.next(); } } - ok_result = Ok(Some(TokType::FStringString)); + ok_result = match str_type { + Some(FTStringType::FString) => Ok(Some(TokType::FStringString)), + Some(FTStringType::TString) => Ok(Some(TokType::TStringString)), + None => unreachable!("We should always have a string type"), + }; } ok_result } - fn maybe_consume_fstring_end(&mut self) -> Option { + fn maybe_consume_ftstring_end(&mut self) -> Option { let ch = self.text_pos.peek(); - if let Some(node) = self.fstring_stack.last() { + if let Some(node) = self.ftstring_stack.last() { if ch == Some(node.quote_char.into()) { if node.quote_size == StringQuoteSize::Triple { self.text_pos.consume(node.quote_char.triple_str()); } else { self.text_pos.next(); // already matched } - self.fstring_stack.pop(); - return Some(TokType::FStringEnd); + let tok_type = match node.string_type { + FTStringType::FString => TokType::FStringEnd, + FTStringType::TString => TokType::TStringEnd, + }; + self.ftstring_stack.pop(); + return Some(tok_type); } } None diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs index b04ccb39..09a51851 100644 --- a/native/libcst/src/tokenizer/core/string_types.rs +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -67,7 +67,13 @@ impl TryFrom> for StringQuoteChar { } #[derive(Clone)] -pub struct FStringNode { +pub enum FTStringType { + FString, + TString, +} + +#[derive(Clone)] +pub struct FTStringNode { pub quote_char: StringQuoteChar, pub quote_size: StringQuoteSize, pub parentheses_count: usize, @@ -75,13 +81,16 @@ pub struct FStringNode { // In the syntax there can be multiple format_spec's nested: {x:{y:3}} pub format_spec_count: usize, pub is_raw_string: bool, + // ftstring type; either f-string or a t-string + pub string_type: FTStringType, } -impl FStringNode { +impl FTStringNode { pub fn new( quote_char: StringQuoteChar, quote_size: StringQuoteSize, is_raw_string: bool, + string_type: FTStringType, ) -> Self { Self { quote_char, @@ 
-90,6 +99,7 @@ impl FStringNode { string_start: None, format_spec_count: 0, is_raw_string, + string_type, } } diff --git a/native/libcst/src/tokenizer/operators.rs b/native/libcst/src/tokenizer/operators.rs index 3252f774..51352900 100644 --- a/native/libcst/src/tokenizer/operators.rs +++ b/native/libcst/src/tokenizer/operators.rs @@ -60,7 +60,7 @@ pub const OPERATORS: &[&str] = &[ "->", // RARROW "...", // ELLIPSIS ":=", // COLONEQUAL - // Not a real operator, but needed to support the split_fstring feature + // Not a real operator, but needed to support the split_ftstring feature "!", // The fake operator added by PEP 401. Technically only valid if used with: // diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index af79971d..20188f47 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -11,7 +11,7 @@ use crate::tokenizer::core::{TokConfig, TokError, TokState, TokType}; fn default_config() -> TokConfig { TokConfig { async_hacks: false, - split_fstring: false, + split_ftstring: false, } } @@ -534,7 +534,7 @@ fn test_string_prefix() { Ok(vec![(TokType::String, r#"r'\\'"#)]), ); let config = TokConfig { - split_fstring: true, + split_ftstring: true, ..default_config() }; assert_eq!( @@ -564,9 +564,9 @@ fn test_string_prefix() { } #[test] -fn test_split_fstring() { +fn test_split_ftstring() { let config = TokConfig { - split_fstring: true, + split_ftstring: true, ..default_config() }; @@ -662,7 +662,7 @@ fn test_split_fstring() { #[test] fn test_fstring_escapes() { let config = TokConfig { - split_fstring: true, + split_ftstring: true, ..default_config() }; assert_eq!( @@ -831,7 +831,7 @@ fn test_inconsistent_indentation_at_eof() { #[test] fn test_nested_f_string_specs() { let config = TokConfig { - split_fstring: true, + split_ftstring: true, ..default_config() }; assert_eq!( @@ -857,7 +857,7 @@ fn test_nested_f_string_specs() { #[test] fn test_nested_f_strings() { let config = TokConfig { - split_fstring: true, + split_ftstring: true, ..default_config() }; assert_eq!( @@ -875,3 +875,45 @@ fn test_nested_f_strings() { ]) ) } +#[test] +fn test_can_tokenize_t_string_basic() { + let config = TokConfig { + split_ftstring: true, + ..default_config() + }; + assert_eq!( + tokenize_all("t'Nothing to see here, move along'", &config), + Ok(vec![ + (TokType::TStringStart, "t'"), + (TokType::TStringString, "Nothing to see here, move along"), + (TokType::TStringEnd, "'") + ]) + ) +} +#[test] +fn test_can_tokenize_f_and_t_strings() { + let config = TokConfig { + split_ftstring: true, + ..default_config() + }; + assert_eq!( + tokenize_all("t\"TMiddle{f'FMiddle{t'{2}'}'}\"", &config), + Ok(vec![ + (TokType::TStringStart, "t\""), + (TokType::TStringString, "TMiddle"), + (TokType::Op, "{"), + (TokType::FStringStart, "f'"), + (TokType::FStringString, "FMiddle"), + (TokType::Op, "{"), + (TokType::TStringStart, "t'"), + (TokType::Op, "{"), + (TokType::Number, "2"), + (TokType::Op, "}"), + (TokType::TStringEnd, "'"), + (TokType::Op, "}"), + (TokType::FStringEnd, "'"), + (TokType::Op, "}"), + (TokType::TStringEnd, "\"") + ]) + ) +} diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py index 25eee0e0..369270ab 100644 --- a/native/libcst/tests/fixtures/super_strings.py +++ b/native/libcst/tests/fixtures/super_strings.py @@ -48,3 +48,11 @@ f'some words {a+b:.3f} more words {c+d=} final words' f"{'':*^{1:{1}}}" f"{'':*^{1:{1:{1}}}}" f"{f"{f"{f"{f"{f"{1+1}"}"}"}"}"}" + + +t'Nothing to 
see here, move along' +t"User {action}: {amount:.2f} {item}" +t"

HTML is code {too}
" +t"value={value!r}" +t"This wrinkles my brain {value:.{precision}f}" +_ = t"everything" + t" is {tstrings}" From 3b5329aa2022d97972d2f58eefe61b759b8e56c1 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Tue, 9 Sep 2025 11:16:49 -0400 Subject: [PATCH 620/632] feat: add support for PEP758 (#1401) PEP758 removes the requirement for parentheses to surround exceptions in except and except* expressions when 'as' is not present. This pr implements support for parsing these types of statements --- libcst/_nodes/tests/test_try.py | 60 +++++++++++++++++++ native/libcst/src/parser/grammar.rs | 37 +++++++++--- .../libcst/tests/fixtures/terrible_tries.py | 22 +++++++ 3 files changed, 110 insertions(+), 9 deletions(-) diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index 5704d098..c5ae2462 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -344,6 +344,34 @@ class TryTest(CSTNodeTest): ), "code": "try: pass\nexcept foo()as bar: pass\n", }, + # PEP758 - Multiple exceptions with no parentheses + { + "node": cst.Try( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=[ + cst.ExceptHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Tuple( + elements=[ + cst.Element( + value=cst.Name( + value="ValueError", + ), + ), + cst.Element( + value=cst.Name( + value="RuntimeError", + ), + ), + ], + lpar=[], + rpar=[], + ), + ) + ], + ), + "code": "try: pass\nexcept ValueError, RuntimeError: pass\n", + }, ) ) def test_valid(self, **kwargs: Any) -> None: @@ -576,6 +604,38 @@ class TryStarTest(CSTNodeTest): "parser": native_parse_statement, "expected_position": CodeRange((1, 0), (5, 13)), }, + # PEP758 - Multiple exceptions with no parentheses + { + "node": cst.TryStar( + cst.SimpleStatementSuite((cst.Pass(),)), + handlers=[ + cst.ExceptStarHandler( + cst.SimpleStatementSuite((cst.Pass(),)), + type=cst.Tuple( + elements=[ + cst.Element( + value=cst.Name( + value="ValueError", + ), + comma=cst.Comma( + whitespace_after=cst.SimpleWhitespace(" ") + ), + ), + cst.Element( + value=cst.Name( + value="RuntimeError", + ), + ), + ], + lpar=[], + rpar=[], + ), + ) + ], + ), + "code": "try: pass\nexcept* ValueError, RuntimeError: pass\n", + "parser": native_parse_statement, + }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 76920d66..86823961 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -554,12 +554,21 @@ parser! { } // Except statement - rule except_block() -> ExceptHandler<'input, 'a> = kw:lit("except") e:expression() a:(k:lit("as") n:name() {(k, n)})? col:lit(":") b:block() { make_except(kw, Some(e), a, col, b) } + / kw:lit("except") e:expression() other:(c:comma() ex:expression() {(c, ex)})+ tc:(c:comma())? + col:lit(":") b:block() { + let tuple = Expression::Tuple(Box::new(Tuple { + elements: comma_separate(expr_to_element(e), other.into_iter().map(|(comma, expr)| (comma, expr_to_element(expr))).collect(), tc), + lpar: vec![], + rpar: vec![], + })); + + make_except(kw, Some(tuple), None, col, b) + } / kw:lit("except") col:lit(":") b:block() { make_except(kw, None, None, col, b) } @@ -569,6 +578,16 @@ parser! { a:(k:lit("as") n:name() {(k, n)})? col:lit(":") b:block() { make_except_star(kw, star, e, a, col, b) } + / kw:lit("except") star:lit("*") e:expression() other:(c:comma() ex:expression() {(c, ex)})+ tc:(c:comma())? 
+ col:lit(":") b:block() { + let tuple = Expression::Tuple(Box::new(Tuple { + elements: comma_separate(expr_to_element(e), other.into_iter().map(|(comma, expr)| (comma, expr_to_element(expr))).collect(), tc), + lpar: vec![], + rpar: vec![], + })); + + make_except_star(kw, star, tuple, None, col, b) + } rule finally_block() -> Finally<'input, 'a> = kw:lit("finally") col:lit(":") b:block() { @@ -1550,22 +1569,22 @@ parser! { rule separated(el: rule, sep: rule) -> (El, Vec<(Sep, El)>) = e:el() rest:(s:sep() e:el() {(s, e)})* {(e, rest)} - rule traced(e: rule) -> T = - &(_* { + rule traced(e: rule) -> T = + &(_* { #[cfg(feature = "trace")] { println!("[PEG_INPUT_START]"); println!("{}", input); println!("[PEG_TRACE_START]"); } - }) - e:e()? {? + }) + e:e()? {? #[cfg(feature = "trace")] - println!("[PEG_TRACE_STOP]"); - e.ok_or("") - } + println!("[PEG_TRACE_STOP]"); + e.ok_or("") + } - } + } } #[allow(clippy::too_many_arguments)] diff --git a/native/libcst/tests/fixtures/terrible_tries.py b/native/libcst/tests/fixtures/terrible_tries.py index 91d6831e..eb5429cc 100644 --- a/native/libcst/tests/fixtures/terrible_tries.py +++ b/native/libcst/tests/fixtures/terrible_tries.py @@ -69,3 +69,25 @@ except foo: pass #9 + +try: + pass +except (foo, bar): + pass + +try: + pass +except foo, bar: + pass + +try: + pass +except (foo, bar), baz: + pass +else: + pass + +try: + pass +except* something, somethingelse: + pass \ No newline at end of file From 47cacb69a32520be52975527e0cc87a4b4d5cd77 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:14:29 -0400 Subject: [PATCH 621/632] bump version to 1.8.4 (#1402) --- CHANGELOG.md | 12 ++++++++++++ README.rst | 2 +- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 5 files changed, 18 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b2bd514..96b415e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +# 1.8.4 - 2025-09-09 + +## What's Changed +* fixed: generate Attribute nodes when applying type annotations by @tungol in https://github.com/Instagram/LibCST/pull/1396 +* added: Support parsing of t-strings #1374 by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1398 +* added: add support for PEP758 by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1401 + +## New Contributors +* @tungol made their first contribution in https://github.com/Instagram/LibCST/pull/1396 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.2...v1.8.4 + # 1.8.3 - 2025-08-29 ## What's Changed * removed: remove entry points to pure parser by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1375 diff --git a/README.rst b/README.rst index 0919ed8f..aaff5f41 100644 --- a/README.rst +++ b/README.rst @@ -37,7 +37,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python .. intro-start -LibCST parses Python 3.0 -> 3.13 source code as a CST tree that keeps +LibCST parses Python 3.0 -> 3.14 source code as a CST tree that keeps all formatting details (comments, whitespaces, parentheses, etc). It's useful for building automated refactoring (codemod) applications and linters. 
diff --git a/native/Cargo.lock b/native/Cargo.lock index 7d39b6ec..8a05c928 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.8.3" +version = "1.8.4" dependencies = [ "annotate-snippets", "criterion", @@ -304,7 +304,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.3" +version = "1.8.4" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index b3abfa63..42f1444a 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.8.3" +version = "1.8.4" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.2" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.3" } +libcst_derive = { path = "../libcst_derive", version = "1.8.4" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 9541f243..72c46021 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.8.3" +version = "1.8.4" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From de5635394b5627cb86656147ba2f96e835ed3d44 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Thu, 25 Sep 2025 23:44:58 -0400 Subject: [PATCH 622/632] fix: circular import error (#1406) * fix: circular import error --- libcst/_parser/entrypoints.py | 3 ++- libcst/tests/test_import.py | 12 ++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 libcst/tests/test_import.py diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index d034258c..bab45ece 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -12,7 +12,6 @@ information from functools import partial from typing import Union -from libcst import native from libcst._nodes.base import CSTNode from libcst._nodes.expression import BaseExpression from libcst._nodes.module import Module @@ -34,6 +33,8 @@ def _parse( encoding, source_str = convert_to_utf8(source, partial=config) + from libcst import native + if entrypoint == "file_input": parse = partial(native.parse_module, encoding=encoding) elif entrypoint == "stmt_input": diff --git a/libcst/tests/test_import.py b/libcst/tests/test_import.py new file mode 100644 index 00000000..cad8883d --- /dev/null +++ b/libcst/tests/test_import.py @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
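+# Regression test for #1406: a plain "import libcst" must succeed without
+# re-triggering the circular import that is avoided by deferring the
+# "from libcst import native" import inside libcst/_parser/entrypoints.py.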
+ + +from unittest import TestCase + + +class TestImport(TestCase): + def test_import_libcst(self) -> None: + import libcst # noqa: F401 From 7c906eb47c02dca8d5b4273a032805516361c8cb Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Fri, 26 Sep 2025 01:03:35 -0400 Subject: [PATCH 623/632] bump version to 1.8.5 (#1407) --- CHANGELOG.md | 6 ++++++ native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 96b415e5..9cd743d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +# 1.8.5 - 2025-09-25 + +## What's Changed +* fixed: circular import error by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1406 + + # 1.8.4 - 2025-09-09 ## What's Changed diff --git a/native/Cargo.lock b/native/Cargo.lock index 8a05c928..4ca4809c 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.8.4" +version = "1.8.5" dependencies = [ "annotate-snippets", "criterion", @@ -304,7 +304,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.4" +version = "1.8.5" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 42f1444a..d65a60cc 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.8.4" +version = "1.8.5" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.2" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.4" } +libcst_derive = { path = "../libcst_derive", version = "1.8.5" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index 72c46021..c054c32d 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.8.4" +version = "1.8.5" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From 6f5da5f99873e4c17e32ca221a3ddde3fb1ea354 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 12 Oct 2025 21:46:50 -0700 Subject: [PATCH 624/632] build(deps): bump astral-sh/setup-uv from 6 to 7 (#1416) Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 6 to 7. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v6...v7) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-version: '7' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 8 ++++---- .github/workflows/pypi_upload.yml | 2 +- .github/workflows/zizmor.yml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d8d7d244..dd3665ad 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ jobs: - "3.14t" steps: - name: Install uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: version: "0.7.13" python-version: ${{ matrix.python-version }} @@ -51,7 +51,7 @@ jobs: fetch-depth: 0 persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: version: "0.7.13" python-version: "3.10" @@ -67,7 +67,7 @@ jobs: fetch-depth: 0 persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: version: "0.7.13" python-version: "3.10" @@ -82,7 +82,7 @@ jobs: fetch-depth: 0 persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: version: "0.7.13" python-version: "3.10" diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index d9d0bcb5..04434a24 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -34,7 +34,7 @@ jobs: with: python-version: "3.10" - name: Install uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: version: "0.7.13" enable-cache: false diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index 26adcaf2..32d62335 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -21,7 +21,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 - name: Run zizmor 🌈 run: uvx zizmor --format sarif . > results.sarif From 129b20f4769a2b26b41b262fb88f9aaf8754930b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 12 Oct 2025 21:47:18 -0700 Subject: [PATCH 625/632] build(deps): bump github/codeql-action from 3 to 4 (#1415) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3 to 4. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v3...v4) --- updated-dependencies: - dependency-name: github/codeql-action dependency-version: '4' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/zizmor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index 32d62335..47fdfe00 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -29,7 +29,7 @@ jobs: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v3 + uses: github/codeql-action/upload-sarif@v4 with: sarif_file: results.sarif category: zizmor \ No newline at end of file From 421f7d34004de18391014319ec6a818322637a8d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 12 Oct 2025 21:47:40 -0700 Subject: [PATCH 626/632] build(deps): bump pypa/cibuildwheel from 3.1.4 to 3.2.1 (#1414) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.1.4 to 3.2.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v3.1.4...v3.2.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-version: 3.2.1 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9f1c6d9f..742a4176 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,7 +44,7 @@ jobs: if: github.event_name != 'release' run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - name: Build wheels - uses: pypa/cibuildwheel@v3.1.4 + uses: pypa/cibuildwheel@v3.2.1 - uses: actions/upload-artifact@v4 with: path: wheelhouse/*.whl From 73b17d8449034b520e7128c8a4bd100674d1e78c Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Fri, 24 Oct 2025 13:49:25 -0700 Subject: [PATCH 627/632] Update pyproject.toml for 3.14t (#1417) - Update description to include 3.14 - Add 3.14 and free-threading trove classifiers - Update deps to switch back to pyyaml for 3.14 --- pyproject.toml | 7 +++- uv.lock | 111 ++++++++++++++++++++++++++++++++++++++++++++----- 2 files changed, 106 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 394eca43..f29b2474 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools", "setuptools-scm", "setuptools-rust", "wheel"] [project] name = "libcst" -description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs." +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.14 programs." 
readme = "README.rst" dynamic = ["version"] license = { file = "LICENSE" } @@ -15,12 +15,15 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Programming Language :: Python :: Free Threading", "Typing :: Typed", ] requires-python = ">=3.9" dependencies = [ "pyyaml>=5.2; python_version < '3.13'", - "pyyaml-ft>=8.0.0; python_version >= '3.13'", + "pyyaml-ft>=8.0.0; python_version == '3.13'", + "pyyaml>=6.0.3; python_version >= '3.14'", "typing-extensions; python_version < '3.10'", ] diff --git a/uv.lock b/uv.lock index 2cc4db06..f271e1ea 100644 --- a/uv.lock +++ b/uv.lock @@ -2,7 +2,8 @@ version = 1 revision = 2 requires-python = ">=3.9" resolution-markers = [ - "python_full_version >= '3.13'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", "python_full_version < '3.10'", @@ -25,7 +26,8 @@ name = "alabaster" version = "1.0.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] @@ -400,7 +402,8 @@ name = "click" version = "8.2.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] @@ -841,7 +844,8 @@ name = "ipython" version = "9.3.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", "python_full_version >= '3.11' and python_full_version < '3.13'", ] dependencies = [ @@ -1062,7 +1066,8 @@ dependencies = [ { name = "jsonschema", extra = ["format-nongpl"] }, { name = "packaging" }, { name = "python-json-logger" }, - { name = "pyyaml" }, + { name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "referencing" }, { name = "rfc3339-validator" }, { name = "rfc3986-validator" }, @@ -1205,8 +1210,9 @@ wheels = [ name = "libcst" source = { editable = "." 
} dependencies = [ - { name = "pyyaml", marker = "python_full_version < '3.13'" }, - { name = "pyyaml-ft", marker = "python_full_version >= '3.13'" }, + { name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.13'" }, + { name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, + { name = "pyyaml-ft", marker = "python_full_version == '3.13.*'" }, { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] @@ -1257,7 +1263,8 @@ docs = [ [package.metadata] requires-dist = [ { name = "pyyaml", marker = "python_full_version < '3.13'", specifier = ">=5.2" }, - { name = "pyyaml-ft", marker = "python_full_version >= '3.13'", specifier = ">=8.0.0" }, + { name = "pyyaml", marker = "python_full_version >= '3.14'", specifier = ">=6.0.3" }, + { name = "pyyaml-ft", marker = "python_full_version == '3.13.*'", specifier = ">=8.0.0" }, { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] @@ -1654,7 +1661,8 @@ version = "0.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pastel" }, - { name = "pyyaml" }, + { name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d6/b1/d4f4361b278fae10f6074675385ce3acf53c647f8e6eeba22c652f8ba985/poethepoet-0.35.0.tar.gz", hash = "sha256:b396ae862d7626e680bbd0985b423acf71634ce93a32d8b5f38340f44f5fbc3e", size = 66006, upload-time = "2025-06-09T12:58:18.849Z" } @@ -1859,6 +1867,12 @@ wheels = [ name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version == '3.10.*'", + "python_full_version < '3.10'", +] sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, @@ -1908,6 +1922,82 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, ] +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", +] +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, 
upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, 
upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" 
}, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", size = 184450, upload-time = "2025-09-25T21:33:00.618Z" }, + { url = "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", size = 174319, upload-time = "2025-09-25T21:33:02.086Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", size = 737631, upload-time = "2025-09-25T21:33:03.25Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", size = 836795, upload-time = "2025-09-25T21:33:05.014Z" }, + { url = "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", size = 750767, upload-time = "2025-09-25T21:33:06.398Z" }, + { url = "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", size = 727982, upload-time = "2025-09-25T21:33:08.708Z" }, + { url = "https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", size = 755677, upload-time = "2025-09-25T21:33:09.876Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/6f5e0d58bd924fb0d06c3a6bad00effbdae2de5adb5cda5648006ffbd8d3/pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0", size = 142592, upload-time = "2025-09-25T21:33:10.983Z" }, + { url = "https://files.pythonhosted.org/packages/f0/0c/25113e0b5e103d7f1490c0e947e303fe4a696c10b501dea7a9f49d4e876c/pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007", size = 158777, upload-time = "2025-09-25T21:33:15.55Z" }, +] + [[package]] name = "pyyaml-ft" version = "8.0.0" @@ -2347,7 +2437,8 @@ name = "sphinx" version = "8.1.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == 
'3.10.*'", ] From c2169d240b5011ea820dc18d7a2452f182c2c5d3 Mon Sep 17 00:00:00 2001 From: Colin Watson Date: Tue, 28 Oct 2025 19:37:35 +0000 Subject: [PATCH 628/632] Update PyO3 to 0.26 (#1413) --- native/Cargo.lock | 25 ++++++++++----------- native/libcst/Cargo.toml | 2 +- native/libcst/src/nodes/expression.rs | 8 +++---- native/libcst/src/nodes/macros.rs | 4 ++-- native/libcst/src/nodes/parser_config.rs | 26 +++++++++++----------- native/libcst/src/nodes/py_cached.rs | 16 +++++++------- native/libcst/src/nodes/traits.rs | 28 ++++++++++++------------ native/libcst/src/parser/errors.rs | 2 +- native/libcst/src/py.rs | 12 +++++----- 9 files changed, 61 insertions(+), 62 deletions(-) diff --git a/native/Cargo.lock b/native/Cargo.lock index 4ca4809c..0ba138ea 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -355,9 +355,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.16.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "oorandom" @@ -443,9 +443,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a" +checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383" dependencies = [ "indoc", "libc", @@ -460,19 +460,18 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598" +checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f" dependencies = [ - "once_cell", "target-lexicon", ] [[package]] name = "pyo3-ffi" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c" +checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105" dependencies = [ "libc", "pyo3-build-config", @@ -480,9 +479,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50" +checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -492,9 +491,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc" +checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf" dependencies = [ "heck", "proc-macro2", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index d65a60cc..4fb029ca 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -36,7 +36,7 @@ trace = ["peg/trace"] [dependencies] paste = "1.0.15" -pyo3 = { version = "0.25.1", optional = true } +pyo3 = { version = "0.26", optional = true } thiserror = "2.0.12" peg = "0.8.5" annotate-snippets = "0.11.5" diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs index 961eee9f..c72d301d 
100644 --- a/native/libcst/src/nodes/expression.rs +++ b/native/libcst/src/nodes/expression.rs @@ -2670,8 +2670,8 @@ mod py { use crate::nodes::traits::py::TryIntoPy; // TODO: this could be a derive helper attribute to override the python class name - impl<'a> TryIntoPy for Element<'a> { - fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { + impl<'a> TryIntoPy> for Element<'a> { + fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult> { match self { Self::Starred(s) => s.try_into_py(py), Self::Simple { value, comma } => { @@ -2699,8 +2699,8 @@ mod py { } // TODO: this could be a derive helper attribute to override the python class name - impl<'a> TryIntoPy for DictElement<'a> { - fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { + impl<'a> TryIntoPy> for DictElement<'a> { + fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult> { match self { Self::Starred(s) => s.try_into_py(py), Self::Simple { diff --git a/native/libcst/src/nodes/macros.rs b/native/libcst/src/nodes/macros.rs index c9ed49c1..11028b8c 100644 --- a/native/libcst/src/nodes/macros.rs +++ b/native/libcst/src/nodes/macros.rs @@ -17,8 +17,8 @@ macro_rules! py_import { ( $module_name:expr, $member_name:expr, $getter_fn:ident ) => { paste::paste! { static [] - : pyo3::once_cell::GILOnceCell> - = pyo3::once_cell::GILOnceCell::new(); + : pyo3::once_cell::PyOnceLock>> + = pyo3::once_cell::PyOnceLock::new(); fn $getter_fn<'py>(py: pyo3::Python<'py>) -> pyo3::PyResult<&'py pyo3::PyAny> { Ok([].get_or_init(py, || { diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs index c10d86d5..e274d8df 100644 --- a/native/libcst/src/nodes/parser_config.rs +++ b/native/libcst/src/nodes/parser_config.rs @@ -29,12 +29,12 @@ impl BaseWhitespaceParserConfig { } #[getter] - fn get_lines(&self, py: Python) -> PyObject { + fn get_lines(&self, py: Python) -> Py { self.lines.to_object(py) } #[getter] - fn get_default_newline(&self, py: Python) -> PyObject { + fn get_default_newline(&self, py: Python) -> Py { self.default_newline.to_object(py) } } @@ -62,23 +62,23 @@ impl BaseWhitespaceParserConfig { } } -// These fields are private and PyObject, since we don't currently care about using them from +// These fields are private and Py, since we don't currently care about using them from // within rust. 
#[pyclass(extends=BaseWhitespaceParserConfig, module="libcst_native.parser_config")] #[text_signature = "(*, lines, encoding, default_indent, default_newline, has_trailing_newline, version, future_imports)"] pub struct ParserConfig { // lines is inherited #[pyo3(get)] - encoding: PyObject, + encoding: Py, #[pyo3(get)] - default_indent: PyObject, + default_indent: Py, // default_newline is inherited #[pyo3(get)] - has_trailing_newline: PyObject, + has_trailing_newline: Py, #[pyo3(get)] - version: PyObject, + version: Py, #[pyo3(get)] - future_imports: PyObject, + future_imports: Py, } #[pymethods] @@ -86,12 +86,12 @@ impl ParserConfig { #[new] fn new( lines: &PySequence, - encoding: PyObject, - default_indent: PyObject, + encoding: Py, + default_indent: Py, default_newline: &PyString, - has_trailing_newline: PyObject, - version: PyObject, - future_imports: PyObject, + has_trailing_newline: Py, + version: Py, + future_imports: Py, ) -> PyResult<(Self, BaseWhitespaceParserConfig)> { Ok(( Self { diff --git a/native/libcst/src/nodes/py_cached.rs b/native/libcst/src/nodes/py_cached.rs index d2398051..307082da 100644 --- a/native/libcst/src/nodes/py_cached.rs +++ b/native/libcst/src/nodes/py_cached.rs @@ -7,11 +7,11 @@ use pyo3::prelude::*; use std::convert::AsRef; use std::ops::Deref; -/// An immutable wrapper around a rust type T and it's PyObject equivalent. Caches the conversion -/// to and from the PyObject. +/// An immutable wrapper around a rust type T and its Py equivalent. Caches the conversion +/// to and from the Py. pub struct PyCached { native: T, - py_object: PyObject, + py_object: Py, } impl PyCached @@ -31,7 +31,7 @@ where T: FromPyObject<'source>, { fn extract(ob: &'source PyAny) -> PyResult { - Python::with_gil(|py| { + Python::attach(|py| { Ok(PyCached { native: ob.extract()?, py_object: ob.to_object(py), @@ -40,14 +40,14 @@ where } } -impl IntoPy for PyCached { - fn into_py(self, _py: Python) -> PyObject { +impl IntoPy> for PyCached { + fn into_py(self, _py: Python) -> Py { self.py_object } } impl ToPyObject for PyCached { - fn to_object(&self, py: Python) -> PyObject { + fn to_object(&self, py: Python) -> Py { self.py_object.clone_ref(py) } } @@ -71,6 +71,6 @@ where T: ToPyObject, { fn from(val: T) -> Self { - Python::with_gil(|py| Self::new(py, val)) + Python::attach(|py| Self::new(py, val)) } } diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index df61538b..c15a60e1 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -118,7 +118,7 @@ impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { } #[cfg(feature = "py")] pub mod py { - use pyo3::{types::PyTuple, IntoPyObjectExt, PyObject, PyResult, Python}; + use pyo3::{types::PyTuple, IntoPyObjectExt, Py, PyAny, PyResult, Python}; // TODO: replace with upstream implementation once // https://github.com/PyO3/pyo3/issues/1813 is resolved @@ -133,26 +133,26 @@ pub mod py { // } // } - impl TryIntoPy for bool { - fn try_into_py(self, py: Python) -> PyResult { + impl TryIntoPy> for bool { + fn try_into_py(self, py: Python) -> PyResult> { self.into_py_any(py) } } - impl> TryIntoPy for Box + impl>> TryIntoPy> for Box where - T: TryIntoPy, + T: TryIntoPy>, { - fn try_into_py(self, py: Python) -> PyResult { + fn try_into_py(self, py: Python) -> PyResult> { (*self).try_into_py(py) } } - impl TryIntoPy for Option + impl TryIntoPy> for Option where - T: TryIntoPy, + T: TryIntoPy>, { - fn try_into_py(self, py: Python) -> PyResult { + fn try_into_py(self, py: Python) -> PyResult> { 
Ok(match self { None => py.None(), Some(x) => x.try_into_py(py)?, @@ -160,11 +160,11 @@ pub mod py { } } - impl TryIntoPy for Vec + impl TryIntoPy> for Vec where - T: TryIntoPy, + T: TryIntoPy>, { - fn try_into_py(self, py: Python) -> PyResult { + fn try_into_py(self, py: Python) -> PyResult> { let converted = self .into_iter() .map(|x| x.try_into_py(py)) @@ -174,8 +174,8 @@ pub mod py { } } - impl<'a> TryIntoPy for &'a str { - fn try_into_py(self, py: Python) -> PyResult { + impl<'a> TryIntoPy> for &'a str { + fn try_into_py(self, py: Python) -> PyResult> { self.into_py_any(py) } } diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 8237cd0b..7fb3b740 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -35,7 +35,7 @@ mod py_error { impl<'a> From> for PyErr { fn from(e: ParserError) -> Self { - Python::with_gil(|py| { + Python::attach(|py| { let lines = match &e { ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => { text.lines().collect::>() diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs index 57da11e7..68c03744 100644 --- a/native/libcst/src/py.rs +++ b/native/libcst/src/py.rs @@ -11,21 +11,21 @@ use pyo3::prelude::*; pub fn libcst_native(_py: Python, m: &Bound) -> PyResult<()> { #[pyfn(m)] #[pyo3(signature = (source, encoding=None))] - fn parse_module(source: String, encoding: Option<&str>) -> PyResult { + fn parse_module(source: String, encoding: Option<&str>) -> PyResult> { let m = crate::parse_module(source.as_str(), encoding)?; - Python::with_gil(|py| m.try_into_py(py)) + Python::attach(|py| m.try_into_py(py)) } #[pyfn(m)] - fn parse_expression(source: String) -> PyResult { + fn parse_expression(source: String) -> PyResult> { let expr = crate::parse_expression(source.as_str())?; - Python::with_gil(|py| expr.try_into_py(py)) + Python::attach(|py| expr.try_into_py(py)) } #[pyfn(m)] - fn parse_statement(source: String) -> PyResult { + fn parse_statement(source: String) -> PyResult> { let stm = crate::parse_statement(source.as_str())?; - Python::with_gil(|py| stm.try_into_py(py)) + Python::attach(|py| stm.try_into_py(py)) } Ok(()) From b66c0e28229fd91ac145f1a56d7d884e6f8587b4 Mon Sep 17 00:00:00 2001 From: Frank Liu <94014427+frvnkliu@users.noreply.github.com> Date: Sun, 2 Nov 2025 17:27:32 -0800 Subject: [PATCH 629/632] [CodemodCommand] Make `transform_module` `supported_transforms` order deterministic by using `List` over `Dict` (#1424) --- libcst/codemod/_command.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index 55a57247..3671a3fb 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -6,7 +6,7 @@ import argparse import inspect from abc import ABC, abstractmethod -from typing import Dict, Generator, List, Type, TypeVar +from typing import Dict, Generator, List, Tuple, Type, TypeVar from libcst import Module from libcst.codemod._codemod import Codemod @@ -75,13 +75,13 @@ class CodemodCommand(Codemod, ABC): # have a static method that other transforms can use which takes # a context and other optional args and modifies its own context key # accordingly. We import them here so that we don't have circular imports. 
- supported_transforms: Dict[str, Type[Codemod]] = { - AddImportsVisitor.CONTEXT_KEY: AddImportsVisitor, - RemoveImportsVisitor.CONTEXT_KEY: RemoveImportsVisitor, - } + supported_transforms: List[Tuple[str, Type[Codemod]]] = [ + (AddImportsVisitor.CONTEXT_KEY, AddImportsVisitor), + (RemoveImportsVisitor.CONTEXT_KEY, RemoveImportsVisitor), + ] # For any visitors that we support auto-running, run them here if needed. - for key, transform in supported_transforms.items(): + for key, transform in supported_transforms: if key in self.context.scratch: # We have work to do, so lets run this. tree = self._instantiate_and_run(transform, tree) From 9275a8bf7875d08659ce7b266860138bba633410 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Mon, 3 Nov 2025 16:48:42 -0500 Subject: [PATCH 630/632] bump version to 1.8.6 (#1425) --- CHANGELOG.md | 13 +++++++++++++ MAINTAINERS.md | 7 ++++--- native/Cargo.lock | 4 ++-- native/libcst/Cargo.toml | 4 ++-- native/libcst_derive/Cargo.toml | 2 +- 5 files changed, 22 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9cd743d1..f72d53f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,16 @@ +# 1.8.6 - 2025-11-03 + +## What's Changed +* Update pyproject.toml for 3.14t by @itamaro in https://github.com/Instagram/LibCST/pull/1417 +* Update PyO3 to 0.26 by @cjwatson in https://github.com/Instagram/LibCST/pull/1413 +* Make CodemodCommand's supported_transforms order deterministic by @frvnkliu in https://github.com/Instagram/LibCST/pull/1424 + +## New Contributors +* @cjwatson made their first contribution in https://github.com/Instagram/LibCST/pull/1413 +* @frvnkliu made their first contribution in https://github.com/Instagram/LibCST/pull/1424 + +**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.5...v1.8.6 + # 1.8.5 - 2025-09-25 ## What's Changed diff --git a/MAINTAINERS.md b/MAINTAINERS.md index eb94a2a0..a7d79006 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -6,6 +6,7 @@ 1. Version bumps are generally not worth mentioning with some notable exceptions (like pyo3) 1. Group related PRs into one bullet point if it makes sense 2. manually bump versions in `Cargo.toml` files in the repo -3. make a new PR with the above changes, get it reviewed and landed -4. make a new release on Github, create a new tag on publish, and copy the contents of the changelog entry in there -5. after publishing, check out the repo at the new tag, and run `cd native; cargo +nightly publish -Z package-workspace -p libcst_derive -p libcst` \ No newline at end of file +3. run `cargo update -p libcst` +4. make a new PR with the above changes, get it reviewed and landed +5. make a new release on Github, create a new tag on publish, and copy the contents of the changelog entry in there +6. 
after publishing, check out the repo at the new tag, and run `cd native; cargo +nightly publish -Z package-workspace -p libcst_derive -p libcst` diff --git a/native/Cargo.lock b/native/Cargo.lock index 0ba138ea..16ffd999 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libcst" -version = "1.8.5" +version = "1.8.6" dependencies = [ "annotate-snippets", "criterion", @@ -304,7 +304,7 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.5" +version = "1.8.6" dependencies = [ "quote", "syn", diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index 4fb029ca..e4c9f45f 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "libcst" -version = "1.8.5" +version = "1.8.6" authors = ["LibCST Developers"] edition = "2018" rust-version = "1.70" @@ -42,7 +42,7 @@ peg = "0.8.5" annotate-snippets = "0.11.5" regex = "1.11.2" memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.5" } +libcst_derive = { path = "../libcst_derive", version = "1.8.6" } [dev-dependencies] criterion = { version = "0.6.0", features = ["html_reports"] } diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index c054c32d..bf9959ab 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcst_derive" -version = "1.8.5" +version = "1.8.6" edition = "2018" description = "Proc macro helpers for libcst." license = "MIT" From b75343e74e48b33553c1fa3430677c03baef2b4f Mon Sep 17 00:00:00 2001 From: Frank Liu <94014427+frvnkliu@users.noreply.github.com> Date: Wed, 17 Dec 2025 09:28:24 -0800 Subject: [PATCH 631/632] Create CodemodCommand Remove/Add Import helper functions (#1432) * Create helper functions to abstract away usage of RemoveImportsVisitor's remove unused import functions in CodemodCommand * Create helper functions to abstract away usage of AddImportsVisitor's add needed import functions in CodemodCommand * Add tests for CodemodCommand helper functions Add comprehensive tests for the new helper methods: - remove_unused_import - remove_unused_import_by_node - add_needed_import Tests cover simple cases, from imports, aliased imports, relative imports, and combined add/remove operations. --- libcst/codemod/_command.py | 26 +- libcst/codemod/tests/test_command_helpers.py | 325 +++++++++++++++++++ 2 files changed, 350 insertions(+), 1 deletion(-) create mode 100644 libcst/codemod/tests/test_command_helpers.py diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index 3671a3fb..b7784d30 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -3,12 +3,14 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # +from __future__ import annotations + import argparse import inspect from abc import ABC, abstractmethod from typing import Dict, Generator, List, Tuple, Type, TypeVar -from libcst import Module +from libcst import CSTNode, Module from libcst.codemod._codemod import Codemod from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer @@ -65,6 +67,28 @@ class CodemodCommand(Codemod, ABC): """ ... 
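[Editor's note] The patch above adds thin wrappers so a codemod command can schedule import edits on itself instead of passing `self.context` to the visitor statics; the full diff and the new tests follow below. A hedged sketch of how a command might use the wrappers — the command name, modules, and matcher logic here are illustrative, not from the patch; only the helper signatures (`add_needed_import`, `remove_unused_import`) come from the diff.

```python
import libcst as cst
import libcst.matchers as m
from libcst.codemod import VisitorBasedCodemodCommand


class SwapLoggingModule(VisitorBasedCodemodCommand):
    """Hypothetical example: rewrite old_logging.get_logger(...) to new_logging.get_logger(...)."""

    DESCRIPTION = "Illustrative use of the new import helper methods."

    def leave_Attribute(
        self, original_node: cst.Attribute, updated_node: cst.Attribute
    ) -> cst.BaseExpression:
        if m.matches(
            updated_node,
            m.Attribute(value=m.Name("old_logging"), attr=m.Name("get_logger")),
        ):
            # New helpers from this patch: queue import edits on the command itself,
            # instead of calling AddImportsVisitor/RemoveImportsVisitor with self.context.
            self.add_needed_import("new_logging")
            self.remove_unused_import("old_logging")
            return updated_node.with_changes(value=cst.Name("new_logging"))
        return updated_node
```

The wrappers keep the context plumbing out of command code; the queued add/remove transforms still run automatically at the end of `transform_module`, as before.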
+ # Lightweight wrappers for RemoveImportsVisitor static functions + def remove_unused_import( + self, + module: str, + obj: str | None = None, + asname: str | None = None, + ) -> None: + RemoveImportsVisitor.remove_unused_import(self.context, module, obj, asname) + + def remove_unused_import_by_node(self, node: CSTNode) -> None: + RemoveImportsVisitor.remove_unused_import_by_node(self.context, node) + + # Lightweight wrappers for AddImportsVisitor static functions + def add_needed_import( + self, + module: str, + obj: str | None = None, + asname: str | None = None, + relative: int = 0, + ) -> None: + AddImportsVisitor.add_needed_import(self.context, module, obj, asname, relative) + def transform_module(self, tree: Module) -> Module: # Overrides (but then calls) Codemod's transform_module to provide # a spot where additional supported transforms can be attached and run. diff --git a/libcst/codemod/tests/test_command_helpers.py b/libcst/codemod/tests/test_command_helpers.py new file mode 100644 index 00000000..15c461b6 --- /dev/null +++ b/libcst/codemod/tests/test_command_helpers.py @@ -0,0 +1,325 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from typing import Union + +import libcst as cst +from libcst.codemod import CodemodTest, VisitorBasedCodemodCommand + + +class TestRemoveUnusedImportHelper(CodemodTest): + """Tests for the remove_unused_import helper method in CodemodCommand.""" + + def test_remove_unused_import_simple(self) -> None: + """ + Test that remove_unused_import helper method works correctly. + """ + + class RemoveBarImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule removal + self.remove_unused_import("bar") + + before = """ + import bar + import baz + + def foo() -> None: + pass + """ + after = """ + import baz + + def foo() -> None: + pass + """ + + self.TRANSFORM = RemoveBarImport + self.assertCodemod(before, after) + + def test_remove_unused_import_from_simple(self) -> None: + """ + Test that remove_unused_import helper method works correctly with from imports. + """ + + class RemoveBarFromImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule removal + self.remove_unused_import("a.b.c", "bar") + + before = """ + from a.b.c import bar, baz + + def foo() -> None: + baz() + """ + after = """ + from a.b.c import baz + + def foo() -> None: + baz() + """ + + self.TRANSFORM = RemoveBarFromImport + self.assertCodemod(before, after) + + def test_remove_unused_import_with_alias(self) -> None: + """ + Test that remove_unused_import helper method works correctly with aliased imports. 
+ """ + + class RemoveBarAsQuxImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule removal + self.remove_unused_import("a.b.c", "bar", "qux") + + before = """ + from a.b.c import bar as qux, baz + + def foo() -> None: + baz() + """ + after = """ + from a.b.c import baz + + def foo() -> None: + baz() + """ + + self.TRANSFORM = RemoveBarAsQuxImport + self.assertCodemod(before, after) + + +class TestRemoveUnusedImportByNodeHelper(CodemodTest): + """Tests for the remove_unused_import_by_node helper method in CodemodCommand.""" + + def test_remove_unused_import_by_node_simple(self) -> None: + """ + Test that remove_unused_import_by_node helper method works correctly. + """ + + class RemoveBarCallAndImport(VisitorBasedCodemodCommand): + METADATA_DEPENDENCIES = ( + cst.metadata.QualifiedNameProvider, + cst.metadata.ScopeProvider, + ) + + def leave_SimpleStatementLine( + self, + original_node: cst.SimpleStatementLine, + updated_node: cst.SimpleStatementLine, + ) -> Union[cst.RemovalSentinel, cst.SimpleStatementLine]: + # Remove any statement that calls bar() + if cst.matchers.matches( + updated_node, + cst.matchers.SimpleStatementLine( + body=[cst.matchers.Expr(cst.matchers.Call())] + ), + ): + call = cst.ensure_type(updated_node.body[0], cst.Expr).value + if cst.matchers.matches( + call, cst.matchers.Call(func=cst.matchers.Name("bar")) + ): + # Use the helper method to remove imports referenced by this node + self.remove_unused_import_by_node(original_node) + return cst.RemoveFromParent() + return updated_node + + before = """ + from foo import bar, baz + + def fun() -> None: + bar() + baz() + """ + after = """ + from foo import baz + + def fun() -> None: + baz() + """ + + self.TRANSFORM = RemoveBarCallAndImport + self.assertCodemod(before, after) + + +class TestAddNeededImportHelper(CodemodTest): + """Tests for the add_needed_import helper method in CodemodCommand.""" + + def test_add_needed_import_simple(self) -> None: + """ + Test that add_needed_import helper method works correctly. + """ + + class AddBarImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule import addition + self.add_needed_import("bar") + + before = """ + def foo() -> None: + pass + """ + after = """ + import bar + + def foo() -> None: + pass + """ + + self.TRANSFORM = AddBarImport + self.assertCodemod(before, after) + + def test_add_needed_import_from_simple(self) -> None: + """ + Test that add_needed_import helper method works correctly with from imports. + """ + + class AddBarFromImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule import addition + self.add_needed_import("a.b.c", "bar") + + before = """ + def foo() -> None: + pass + """ + after = """ + from a.b.c import bar + + def foo() -> None: + pass + """ + + self.TRANSFORM = AddBarFromImport + self.assertCodemod(before, after) + + def test_add_needed_import_with_alias(self) -> None: + """ + Test that add_needed_import helper method works correctly with aliased imports. 
+ """ + + class AddBarAsQuxImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule import addition + self.add_needed_import("a.b.c", "bar", "qux") + + before = """ + def foo() -> None: + pass + """ + after = """ + from a.b.c import bar as qux + + def foo() -> None: + pass + """ + + self.TRANSFORM = AddBarAsQuxImport + self.assertCodemod(before, after) + + def test_add_needed_import_relative(self) -> None: + """ + Test that add_needed_import helper method works correctly with relative imports. + """ + + class AddRelativeImport(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Use the helper method to schedule relative import addition + self.add_needed_import("c", "bar", relative=2) + + before = """ + def foo() -> None: + pass + """ + after = """ + from ..c import bar + + def foo() -> None: + pass + """ + + self.TRANSFORM = AddRelativeImport + self.assertCodemod(before, after) + + +class TestCombinedHelpers(CodemodTest): + """Tests for combining add_needed_import and remove_unused_import helper methods.""" + + def test_add_and_remove_imports(self) -> None: + """ + Test that both helper methods work correctly when used together. + """ + + class ReplaceBarWithBaz(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Add new import and remove old one + self.add_needed_import("new_module", "baz") + self.remove_unused_import("old_module", "bar") + + before = """ + from other_module import qux + from old_module import bar + + def foo() -> None: + pass + """ + after = """ + from other_module import qux + from new_module import baz + + def foo() -> None: + pass + """ + + self.TRANSFORM = ReplaceBarWithBaz + self.assertCodemod(before, after) + + def test_add_and_remove_same_import(self) -> None: + """ + Test that both helper methods work correctly when used together. 
+ """ + + class AddAndRemoveBar(VisitorBasedCodemodCommand): + def visit_Module(self, node: cst.Module) -> None: + # Add new import and remove old one + self.add_needed_import("hello_module", "bar") + self.remove_unused_import("hello_module", "bar") + + self.TRANSFORM = AddAndRemoveBar + + before = """ + from other_module import baz + + def foo() -> None: + pass + """ + # Should remain unchanged + self.assertCodemod(before, before) + + before = """ + from other_module import baz + from hello_module import bar + + def foo() -> None: + bar.func() + """ + self.assertCodemod(before, before) + + before = """ + from other_module import baz + from hello_module import bar + + def foo() -> None: + pass + """ + + after = """ + from other_module import baz + + def foo() -> None: + pass + """ + self.assertCodemod(before, after) From c5e40e87693cdd05f7620f4998830117f5720603 Mon Sep 17 00:00:00 2001 From: martin <48778384+drinkmorewaterr@users.noreply.github.com> Date: Wed, 17 Dec 2025 13:01:40 -0500 Subject: [PATCH 632/632] chore: remove macos-13 from ci (#1433) remove macos-13 from ci --- .github/workflows/build.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 742a4176..0df65636 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,10 +10,8 @@ jobs: strategy: fail-fast: false matrix: - # macos-13 is an intel runner, macos-latest is apple silicon os: [ - macos-13, macos-latest, ubuntu-latest, ubuntu-24.04-arm, @@ -32,10 +30,6 @@ jobs: with: python-version: "3.12" - uses: dtolnay/rust-toolchain@stable - - name: Set MACOSX_DEPLOYMENT_TARGET for Intel MacOS - if: matrix.os == 'macos-13' - run: >- - echo MACOSX_DEPLOYMENT_TARGET=10.12 >> $GITHUB_ENV - name: Disable scmtools local scheme if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >-