bpo-30455: Generate all token related code and docs from Grammar/Tokens. (GH-10370)

"Include/token.h", "Lib/token.py" (containing now some data moved from
"Lib/tokenize.py") and new files "Parser/token.c" (containing the code
moved from "Parser/tokenizer.c") and "Doc/library/token-list.inc" (included
in "Doc/library/token.rst") are now generated from "Grammar/Tokens" by
"Tools/scripts/generate_token.py". The script overwrites files only if
needed and can be used on the read-only sources tree.
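
For illustration (not part of this commit), a minimal sketch of what the
generated "Lib/token.py" exposes on a Python build that includes these
changes; the exact numeric value and the EXACT_TOKEN_TYPES name are
assumptions based on the commit description:

    import token

    # Token numbers and their names are generated from Grammar/Tokens.
    print(token.LPAR, token.tok_name[token.LPAR])      # e.g. 7 LPAR

    # ISTERMINAL() distinguishes token numbers from grammar symbol numbers.
    print(token.ISTERMINAL(token.LPAR))                # True

    # Mapping of operator/punctuation strings to token numbers; assumed to
    # be the data moved here from Lib/tokenize.py.
    print(token.EXACT_TOKEN_TYPES['('] == token.LPAR)  # True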

"Lib/symbol.py" is now generated by "Tools/scripts/generate_symbol_py.py"
instead of been executable itself.
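
A hedged sketch of how the generator can be driven, mirroring the arguments
used by the updated test shown further below (paths are assumed to be
relative to a CPython checkout):

    import subprocess, sys

    # Regenerate Lib/symbol.py from the grammar header, mirroring the
    # test's call: generate_symbol_py.py <graminit.h> <output file>.
    subprocess.run([sys.executable,
                    'Tools/scripts/generate_symbol_py.py',
                    'Include/graminit.h',
                    'Lib/symbol.py'],
                   check=True)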

Added new make targets "regen-token" and "regen-symbol", which are now
dependencies of "regen-all".

The documentation now contains strings for operators and punctuation tokens.
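
As an example of those operator/punctuation tokens in use, standard
tokenize/token code (not part of this commit) maps each operator string back
to the token name now listed in the documentation:

    import io, token, tokenize

    # Tokenize a small expression and print the exact token name for each
    # operator/punctuation token.
    for tok in tokenize.generate_tokens(io.StringIO("a + (b)").readline):
        if tok.type == token.OP:
            print(tok.string, token.tok_name[tok.exact_type])
    # Prints roughly: "+ PLUS", "( LPAR", ") RPAR"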
Serhiy Storchaka, 2018-12-22 11:18:40 +02:00 (committed by GitHub)
parent c1b4b0f616, commit 8ac658114d
18 changed files with 940 additions and 462 deletions

Lib/test/test_symbol.py

@@ -6,6 +6,9 @@ import subprocess
 SYMBOL_FILE = support.findfile('symbol.py')
+GEN_SYMBOL_FILE = os.path.join(os.path.dirname(__file__),
+                               '..', '..', 'Tools', 'scripts',
+                               'generate_symbol_py.py')
 GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
                             '..', '..', 'Include', 'graminit.h')
 TEST_PY_FILE = 'symbol_test.py'
@@ -22,7 +25,7 @@ class TestSymbolGeneration(unittest.TestCase):
     def _generate_symbols(self, grammar_file, target_symbol_py_file):
         proc = subprocess.Popen([sys.executable,
-                                 SYMBOL_FILE,
+                                 GEN_SYMBOL_FILE,
                                  grammar_file,
                                  target_symbol_py_file], stderr=subprocess.PIPE)
         stderr = proc.communicate()[1]