gh-108455: Run mypy on Tools/peg_generator (#108456)

Co-authored-by: Hugo van Kemenade <hugovk@users.noreply.github.com>
This commit is contained in:
Nikita Sobolev 2023-08-28 23:04:12 +03:00 committed by GitHub
parent f75cefd402
commit cf7ba83eb2
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 23 additions and 11 deletions

View file

@@ -9,6 +9,7 @@ on:
paths: paths:
- "Tools/clinic/**" - "Tools/clinic/**"
- "Tools/cases_generator/**" - "Tools/cases_generator/**"
- "Tools/peg_generator/**"
- "Tools/requirements-dev.txt" - "Tools/requirements-dev.txt"
- ".github/workflows/mypy.yml" - ".github/workflows/mypy.yml"
workflow_dispatch: workflow_dispatch:
@@ -29,7 +30,11 @@ jobs:
mypy: mypy:
strategy: strategy:
matrix: matrix:
target: ["Tools/cases_generator", "Tools/clinic"] target: [
"Tools/cases_generator",
"Tools/clinic",
"Tools/peg_generator",
]
name: Run mypy on ${{ matrix.target }} name: Run mypy on ${{ matrix.target }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 10 timeout-minutes: 10

View file

@@ -1,5 +1,6 @@
[mypy] [mypy]
files = pegen files = Tools/peg_generator/pegen
pretty = True
follow_imports = error follow_imports = error
no_implicit_optional = True no_implicit_optional = True
@@ -24,3 +25,6 @@ show_error_codes = True
[mypy-pegen.grammar_parser] [mypy-pegen.grammar_parser]
strict_optional = False strict_optional = False
[mypy-setuptools.*]
ignore_missing_imports = True

View file

@@ -5,7 +5,7 @@ import sys
import sysconfig import sysconfig
import tempfile import tempfile
import tokenize import tokenize
from typing import IO, Dict, List, Optional, Set, Tuple from typing import IO, Any, Dict, List, Optional, Set, Tuple
from pegen.c_generator import CParserGenerator from pegen.c_generator import CParserGenerator
from pegen.grammar import Grammar from pegen.grammar import Grammar
@@ -18,6 +18,7 @@ from pegen.tokenizer import Tokenizer
MOD_DIR = pathlib.Path(__file__).resolve().parent MOD_DIR = pathlib.Path(__file__).resolve().parent
TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]] TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]]
Incomplete = Any # TODO: install `types-setuptools` and remove this alias
def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]: def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]:
@@ -28,7 +29,7 @@ def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[
return f"{flags} {py_flags_nodist}".split() return f"{flags} {py_flags_nodist}".split()
def fixup_build_ext(cmd): def fixup_build_ext(cmd: Incomplete) -> None:
"""Function needed to make build_ext tests pass. """Function needed to make build_ext tests pass.
When Python was built with --enable-shared on Unix, -L. is not enough to When Python was built with --enable-shared on Unix, -L. is not enough to
@@ -74,7 +75,7 @@ def compile_c_extension(
keep_asserts: bool = True, keep_asserts: bool = True,
disable_optimization: bool = False, disable_optimization: bool = False,
library_dir: Optional[str] = None, library_dir: Optional[str] = None,
) -> str: ) -> pathlib.Path:
"""Compile the generated source for a parser generator into an extension module. """Compile the generated source for a parser generator into an extension module.
The extension module will be generated in the same directory as the provided path The extension module will be generated in the same directory as the provided path

View file

@@ -35,6 +35,7 @@ iskeyword = frozenset(kwlist).__contains__
issoftkeyword = frozenset(softkwlist).__contains__ issoftkeyword = frozenset(softkwlist).__contains__
'''.lstrip() '''.lstrip()
def main() -> None: def main() -> None:
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description="Generate the Lib/keywords.py file from the grammar." description="Generate the Lib/keywords.py file from the grammar."

View file

@@ -10,7 +10,6 @@ from typing import Any, Callable, ClassVar, Dict, Optional, Tuple, Type, TypeVar
from pegen.tokenizer import Mark, Tokenizer, exact_token_types from pegen.tokenizer import Mark, Tokenizer, exact_token_types
T = TypeVar("T") T = TypeVar("T")
P = TypeVar("P", bound="Parser")
F = TypeVar("F", bound=Callable[..., Any]) F = TypeVar("F", bound=Callable[..., Any])
@@ -21,7 +20,7 @@ def logger(method: F) -> F:
""" """
method_name = method.__name__ method_name = method.__name__
def logger_wrapper(self: P, *args: object) -> T: def logger_wrapper(self: "Parser", *args: object) -> Any:
if not self._verbose: if not self._verbose:
return method(self, *args) return method(self, *args)
argsr = ",".join(repr(arg) for arg in args) argsr = ",".join(repr(arg) for arg in args)
@@ -41,7 +40,7 @@ def memoize(method: F) -> F:
"""Memoize a symbol method.""" """Memoize a symbol method."""
method_name = method.__name__ method_name = method.__name__
def memoize_wrapper(self: P, *args: object) -> T: def memoize_wrapper(self: "Parser", *args: object) -> Any:
mark = self._mark() mark = self._mark()
key = mark, method_name, args key = mark, method_name, args
# Fast path: cache hit, and not verbose. # Fast path: cache hit, and not verbose.
@@ -74,11 +73,13 @@ def memoize(method: F) -> F:
return cast(F, memoize_wrapper) return cast(F, memoize_wrapper)
def memoize_left_rec(method: Callable[[P], Optional[T]]) -> Callable[[P], Optional[T]]: def memoize_left_rec(
method: Callable[["Parser"], Optional[T]]
) -> Callable[["Parser"], Optional[T]]:
"""Memoize a left-recursive symbol method.""" """Memoize a left-recursive symbol method."""
method_name = method.__name__ method_name = method.__name__
def memoize_left_rec_wrapper(self: P) -> Optional[T]: def memoize_left_rec_wrapper(self: "Parser") -> Optional[T]:
mark = self._mark() mark = self._mark()
key = mark, method_name, () key = mark, method_name, ()
# Fast path: cache hit, and not verbose. # Fast path: cache hit, and not verbose.

View file

@@ -1,3 +1,3 @@
# Requirements file for external linters and checks we run on # Requirements file for external linters and checks we run on
# Tools/clinic and Tools/cases_generator/ in CI # Tools/clinic, Tools/cases_generator/, and Tools/peg_generator/ in CI
mypy==1.5.1 mypy==1.5.1