gh-92651: Remove the Include/token.h header file (#92652)

Remove the token.h header file. There was never any public tokenizer
C API. The token.h header file was only designed to be used by Python
internals.

Move Include/token.h to Include/internal/pycore_token.h. Including
this header file now requires that the Py_BUILD_CORE macro is
defined. It no longer checks for the Py_LIMITED_API macro.

Rename functions:

* PyToken_OneChar() => _PyToken_OneChar()
* PyToken_TwoChars() => _PyToken_TwoChars()
* PyToken_ThreeChars() => _PyToken_ThreeChars()
This commit is contained in:
Victor Stinner 2022-05-11 23:22:50 +02:00 committed by GitHub
parent b69297ea23
commit da5727a120
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 52 additions and 39 deletions

View file

@@ -139,3 +139,8 @@ Deprecated
Removed Removed
------- -------
* Remove the ``token.h`` header file. There was never any public tokenizer C
API. The ``token.h`` header file was only designed to be used by Python
internals.
(Contributed by Victor Stinner in :gh:`92651`.)

View file

@@ -1,13 +1,16 @@
/* Auto-generated by Tools/scripts/generate_token.py */ /* Auto-generated by Tools/scripts/generate_token.py */
/* Token types */ /* Token types */
#ifndef Py_LIMITED_API #ifndef Py_INTERNAL_TOKEN_H
#ifndef Py_TOKEN_H #define Py_INTERNAL_TOKEN_H
#define Py_TOKEN_H
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
#ifndef Py_BUILD_CORE
# error "this header requires Py_BUILD_CORE define"
#endif
#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */ #undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
#define ENDMARKER 0 #define ENDMARKER 0
@@ -85,13 +88,13 @@ extern "C" {
(x) == DEDENT) (x) == DEDENT)
// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */ PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int); PyAPI_FUNC(int) _PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int); PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int); PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif
#endif /* !Py_TOKEN_H */ #endif // !Py_INTERNAL_TOKEN_H
#endif /* Py_LIMITED_API */

View file

@@ -1325,11 +1325,11 @@ regen-token:
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \ $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \
$(srcdir)/Grammar/Tokens \ $(srcdir)/Grammar/Tokens \
$(srcdir)/Doc/library/token-list.inc $(srcdir)/Doc/library/token-list.inc
# Regenerate Include/token.h from Grammar/Tokens # Regenerate Include/internal/pycore_token.h from Grammar/Tokens
# using Tools/scripts/generate_token.py # using Tools/scripts/generate_token.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \ $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \
$(srcdir)/Grammar/Tokens \ $(srcdir)/Grammar/Tokens \
$(srcdir)/Include/token.h $(srcdir)/Include/internal/pycore_token.h
# Regenerate Parser/token.c from Grammar/Tokens # Regenerate Parser/token.c from Grammar/Tokens
# using Tools/scripts/generate_token.py # using Tools/scripts/generate_token.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \ $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \
@@ -1521,7 +1521,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/structmember.h \ $(srcdir)/Include/structmember.h \
$(srcdir)/Include/structseq.h \ $(srcdir)/Include/structseq.h \
$(srcdir)/Include/sysmodule.h \ $(srcdir)/Include/sysmodule.h \
$(srcdir)/Include/token.h \
$(srcdir)/Include/traceback.h \ $(srcdir)/Include/traceback.h \
$(srcdir)/Include/tracemalloc.h \ $(srcdir)/Include/tracemalloc.h \
$(srcdir)/Include/tupleobject.h \ $(srcdir)/Include/tupleobject.h \
@@ -1632,6 +1631,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_structseq.h \ $(srcdir)/Include/internal/pycore_structseq.h \
$(srcdir)/Include/internal/pycore_symtable.h \ $(srcdir)/Include/internal/pycore_symtable.h \
$(srcdir)/Include/internal/pycore_sysmodule.h \ $(srcdir)/Include/internal/pycore_sysmodule.h \
$(srcdir)/Include/internal/pycore_token.h \
$(srcdir)/Include/internal/pycore_traceback.h \ $(srcdir)/Include/internal/pycore_traceback.h \
$(srcdir)/Include/internal/pycore_tuple.h \ $(srcdir)/Include/internal/pycore_tuple.h \
$(srcdir)/Include/internal/pycore_typeobject.h \ $(srcdir)/Include/internal/pycore_typeobject.h \

View file

@@ -0,0 +1,3 @@
Remove the ``token.h`` header file. There was never any public tokenizer C
API. The ``token.h`` header file was only designed to be used by Python
internals. Patch by Victor Stinner.

View file

@@ -244,6 +244,7 @@
<ClInclude Include="..\Include\internal\pycore_structseq.h" /> <ClInclude Include="..\Include\internal\pycore_structseq.h" />
<ClInclude Include="..\Include\internal\pycore_sysmodule.h" /> <ClInclude Include="..\Include\internal\pycore_sysmodule.h" />
<ClInclude Include="..\Include\internal\pycore_symtable.h" /> <ClInclude Include="..\Include\internal\pycore_symtable.h" />
<ClInclude Include="..\Include\internal\pycore_token.h" />
<ClInclude Include="..\Include\internal\pycore_traceback.h" /> <ClInclude Include="..\Include\internal\pycore_traceback.h" />
<ClInclude Include="..\Include\internal\pycore_tuple.h" /> <ClInclude Include="..\Include\internal\pycore_tuple.h" />
<ClInclude Include="..\Include\internal\pycore_typeobject.h" /> <ClInclude Include="..\Include\internal\pycore_typeobject.h" />
@@ -291,7 +292,6 @@
<ClInclude Include="..\Include\structseq.h" /> <ClInclude Include="..\Include\structseq.h" />
<ClInclude Include="..\Include\symtable.h" /> <ClInclude Include="..\Include\symtable.h" />
<ClInclude Include="..\Include\sysmodule.h" /> <ClInclude Include="..\Include\sysmodule.h" />
<ClInclude Include="..\Include\token.h" />
<ClInclude Include="..\Include\traceback.h" /> <ClInclude Include="..\Include\traceback.h" />
<ClInclude Include="..\Include\tracemalloc.h" /> <ClInclude Include="..\Include\tracemalloc.h" />
<ClInclude Include="..\Include\tupleobject.h" /> <ClInclude Include="..\Include\tupleobject.h" />

View file

@@ -213,9 +213,6 @@
<ClInclude Include="..\Include\sysmodule.h"> <ClInclude Include="..\Include\sysmodule.h">
<Filter>Include</Filter> <Filter>Include</Filter>
</ClInclude> </ClInclude>
<ClInclude Include="..\Include\token.h">
<Filter>Include</Filter>
</ClInclude>
<ClInclude Include="..\Include\traceback.h"> <ClInclude Include="..\Include\traceback.h">
<Filter>Include</Filter> <Filter>Include</Filter>
</ClInclude> </ClInclude>
@@ -633,6 +630,9 @@
<ClInclude Include="..\Include\internal\pycore_symtable.h"> <ClInclude Include="..\Include\internal\pycore_symtable.h">
<Filter>Include\internal</Filter> <Filter>Include\internal</Filter>
</ClInclude> </ClInclude>
<ClInclude Include="..\Include\internal\pycore_token.h">
<Filter>Include\internal</Filter>
</ClInclude>
<ClInclude Include="..\Include\internal\pycore_traceback.h"> <ClInclude Include="..\Include\internal\pycore_traceback.h">
<Filter>Include\internal</Filter> <Filter>Include\internal</Filter>
</ClInclude> </ClInclude>

View file

@@ -19,7 +19,7 @@
<_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc"> <_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc">
<Format>rst</Format> <Format>rst</Format>
</_TokenOutputs> </_TokenOutputs>
<_TokenOutputs Include="$(PySourcePath)Include\token.h"> <_TokenOutputs Include="$(PySourcePath)Include\internal\pycore_token.h">
<Format>h</Format> <Format>h</Format>
</_TokenOutputs> </_TokenOutputs>
<_TokenOutputs Include="$(PySourcePath)Parser\token.c"> <_TokenOutputs Include="$(PySourcePath)Parser\token.c">

View file

@@ -3,8 +3,8 @@
#define PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN
#include <Python.h> #include <Python.h>
#include <token.h>
#include <pycore_ast.h> #include <pycore_ast.h>
#include <pycore_token.h>
#if 0 #if 0
#define PyPARSE_YIELD_IS_KEYWORD 0x0001 #define PyPARSE_YIELD_IS_KEYWORD 0x0001

8
Parser/token.c generated
View file

@@ -1,7 +1,7 @@
/* Auto-generated by Tools/scripts/generate_token.py */ /* Auto-generated by Tools/scripts/generate_token.py */
#include "Python.h" #include "Python.h"
#include "token.h" #include "pycore_token.h"
/* Token names */ /* Token names */
@@ -76,7 +76,7 @@ const char * const _PyParser_TokenNames[] = {
/* Return the token corresponding to a single character */ /* Return the token corresponding to a single character */
int int
PyToken_OneChar(int c1) _PyToken_OneChar(int c1)
{ {
switch (c1) { switch (c1) {
case '%': return PERCENT; case '%': return PERCENT;
@@ -107,7 +107,7 @@ PyToken_OneChar(int c1)
} }
int int
PyToken_TwoChars(int c1, int c2) _PyToken_TwoChars(int c1, int c2)
{ {
switch (c1) { switch (c1) {
case '!': case '!':
@@ -191,7 +191,7 @@ PyToken_TwoChars(int c1, int c2)
} }
int int
PyToken_ThreeChars(int c1, int c2, int c3) _PyToken_ThreeChars(int c1, int c2, int c3)
{ {
switch (c1) { switch (c1) {
case '*': case '*':

View file

@@ -1992,10 +1992,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
/* Check for two-character token */ /* Check for two-character token */
{ {
int c2 = tok_nextc(tok); int c2 = tok_nextc(tok);
int token = PyToken_TwoChars(c, c2); int token = _PyToken_TwoChars(c, c2);
if (token != OP) { if (token != OP) {
int c3 = tok_nextc(tok); int c3 = tok_nextc(tok);
int token3 = PyToken_ThreeChars(c, c2, c3); int token3 = _PyToken_ThreeChars(c, c2, c3);
if (token3 != OP) { if (token3 != OP) {
token = token3; token = token3;
} }
@@ -2059,7 +2059,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
/* Punctuation character */ /* Punctuation character */
*p_start = tok->start; *p_start = tok->start;
*p_end = tok->cur; *p_end = tok->cur;
return PyToken_OneChar(c); return _PyToken_OneChar(c);
} }
int int

View file

@@ -8,7 +8,7 @@
/* Tokenizer interface */ /* Tokenizer interface */
#include "token.h" /* For token types */ #include "pycore_token.h" /* For token types */
#define MAXINDENT 100 /* Max indentation level */ #define MAXINDENT 100 /* Max indentation level */
#define MAXLEVEL 200 /* Max parentheses level */ #define MAXLEVEL 200 /* Max parentheses level */

View file

@@ -24,7 +24,6 @@
#include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_traceback.h" // _PyTraceBack_Print_Indented() #include "pycore_traceback.h" // _PyTraceBack_Print_Indented()
#include "token.h" // INDENT
#include "errcode.h" // E_EOF #include "errcode.h" // E_EOF
#include "marshal.h" // PyMarshal_ReadLongFromFile() #include "marshal.h" // PyMarshal_ReadLongFromFile()

View file

@@ -51,13 +51,16 @@ token_h_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */ /* Auto-generated by Tools/scripts/generate_token.py */
/* Token types */ /* Token types */
#ifndef Py_LIMITED_API #ifndef Py_INTERNAL_TOKEN_H
#ifndef Py_TOKEN_H #define Py_INTERNAL_TOKEN_H
#define Py_TOKEN_H
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
#ifndef Py_BUILD_CORE
# error "this header requires Py_BUILD_CORE define"
#endif
#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */ #undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
%s\ %s\
@@ -75,19 +78,19 @@ extern "C" {
(x) == DEDENT) (x) == DEDENT)
// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */ PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int); PyAPI_FUNC(int) _PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int); PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int); PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif
#endif /* !Py_TOKEN_H */ #endif // !Py_INTERNAL_TOKEN_H
#endif /* Py_LIMITED_API */
""" """
def make_h(infile, outfile='Include/token.h'): def make_h(infile, outfile='Include/internal/pycore_token.h'):
tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile) tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
defines = [] defines = []
@@ -106,7 +109,7 @@ token_c_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */ /* Auto-generated by Tools/scripts/generate_token.py */
#include "Python.h" #include "Python.h"
#include "token.h" #include "pycore_token.h"
/* Token names */ /* Token names */
@@ -117,21 +120,21 @@ const char * const _PyParser_TokenNames[] = {
/* Return the token corresponding to a single character */ /* Return the token corresponding to a single character */
int int
PyToken_OneChar(int c1) _PyToken_OneChar(int c1)
{ {
%s\ %s\
return OP; return OP;
} }
int int
PyToken_TwoChars(int c1, int c2) _PyToken_TwoChars(int c1, int c2)
{ {
%s\ %s\
return OP; return OP;
} }
int int
PyToken_ThreeChars(int c1, int c2, int c3) _PyToken_ThreeChars(int c1, int c2, int c3)
{ {
%s\ %s\
return OP; return OP;