gh-92651: Remove the Include/token.h header file #92652

Merged: 2 commits, May 11, 2022
5 changes: 5 additions & 0 deletions Doc/whatsnew/3.12.rst
@@ -139,3 +139,8 @@ Deprecated

Removed
-------

* Remove the ``token.h`` header file. There was never any public tokenizer C
API. The ``token.h`` header file was only designed to be used by Python
internals.
(Contributed by Victor Stinner in :gh:`92651`.)
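
For context, a minimal sketch of what the removal means for C code (a hypothetical extension snippet, not part of this patch; the file name and compiler message are illustrative):

/* hypothetical_ext.c -- third-party code, for illustration only.
 * Against Python <= 3.11 this include happened to compile because token.h
 * shipped with the public headers, even though it was never documented API. */
#include <Python.h>
#include <token.h>   /* Python 3.12: fails, e.g. "fatal error: token.h: No such file or directory" */

/* The supported way to work with token types from outside the interpreter
 * is the Python-level `token` / `tokenize` modules; the C helpers are now
 * internal-only (see pycore_token.h in the next file). */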
19 changes: 11 additions & 8 deletions Include/token.h → Include/internal/pycore_token.h
@@ -1,13 +1,16 @@
/* Auto-generated by Tools/scripts/generate_token.py */

/* Token types */
#ifndef Py_LIMITED_API
#ifndef Py_TOKEN_H
#define Py_TOKEN_H
#ifndef Py_INTERNAL_TOKEN_H
#define Py_INTERNAL_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
# error "this header requires Py_BUILD_CORE define"
#endif

#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */

#define ENDMARKER 0
@@ -85,13 +88,13 @@ extern "C" {
(x) == DEDENT)


// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
PyAPI_FUNC(int) _PyToken_OneChar(int);
PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);

#ifdef __cplusplus
}
#endif
#endif /* !Py_TOKEN_H */
#endif /* Py_LIMITED_API */
#endif // !Py_INTERNAL_TOKEN_H
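
A rough sketch of how code inside CPython consumes the relocated header after this change (core build only, since the header enforces Py_BUILD_CORE; the wrapper function below is made up for illustration):

/* Internal-consumer sketch -- not a public API. In the real tree,
 * Py_BUILD_CORE is passed by the build system (-DPy_BUILD_CORE); it is
 * defined inline here only to make the requirement visible. */
#define Py_BUILD_CORE 1
#include "Python.h"
#include "pycore_token.h"

static int
classify_one_char(int c)
{
    /* Same behaviour as before the rename, now with a leading underscore:
     * e.g. '(' maps to LPAR and '+' maps to PLUS. */
    return _PyToken_OneChar(c);
}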
6 changes: 3 additions & 3 deletions Makefile.pre.in
@@ -1325,11 +1325,11 @@ regen-token:
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \
$(srcdir)/Grammar/Tokens \
$(srcdir)/Doc/library/token-list.inc
# Regenerate Include/token.h from Grammar/Tokens
# Regenerate Include/internal/pycore_token.h from Grammar/Tokens
# using Tools/scripts/generate_token.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \
$(srcdir)/Grammar/Tokens \
$(srcdir)/Include/token.h
$(srcdir)/Include/internal/pycore_token.h
# Regenerate Parser/token.c from Grammar/Tokens
# using Tools/scripts/generate_token.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \
@@ -1521,7 +1521,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/structmember.h \
$(srcdir)/Include/structseq.h \
$(srcdir)/Include/sysmodule.h \
$(srcdir)/Include/token.h \
$(srcdir)/Include/traceback.h \
$(srcdir)/Include/tracemalloc.h \
$(srcdir)/Include/tupleobject.h \
@@ -1632,6 +1631,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_structseq.h \
$(srcdir)/Include/internal/pycore_symtable.h \
$(srcdir)/Include/internal/pycore_sysmodule.h \
$(srcdir)/Include/internal/pycore_token.h \
$(srcdir)/Include/internal/pycore_traceback.h \
$(srcdir)/Include/internal/pycore_tuple.h \
$(srcdir)/Include/internal/pycore_typeobject.h \
New NEWS entry (Misc/NEWS.d)
@@ -0,0 +1,3 @@
Remove the ``token.h`` header file. There was never any public tokenizer C
API. The ``token.h`` header file was only designed to be used by Python
internals. Patch by Victor Stinner.
2 changes: 1 addition & 1 deletion PCbuild/pythoncore.vcxproj
@@ -244,6 +244,7 @@
<ClInclude Include="..\Include\internal\pycore_structseq.h" />
<ClInclude Include="..\Include\internal\pycore_sysmodule.h" />
<ClInclude Include="..\Include\internal\pycore_symtable.h" />
<ClInclude Include="..\Include\internal\pycore_token.h" />
<ClInclude Include="..\Include\internal\pycore_traceback.h" />
<ClInclude Include="..\Include\internal\pycore_tuple.h" />
<ClInclude Include="..\Include\internal\pycore_typeobject.h" />
@@ -291,7 +292,6 @@
<ClInclude Include="..\Include\structseq.h" />
<ClInclude Include="..\Include\symtable.h" />
<ClInclude Include="..\Include\sysmodule.h" />
<ClInclude Include="..\Include\token.h" />
<ClInclude Include="..\Include\traceback.h" />
<ClInclude Include="..\Include\tracemalloc.h" />
<ClInclude Include="..\Include\tupleobject.h" />
6 changes: 3 additions & 3 deletions PCbuild/pythoncore.vcxproj.filters
@@ -213,9 +213,6 @@
<ClInclude Include="..\Include\sysmodule.h">
<Filter>Include</Filter>
</ClInclude>
<ClInclude Include="..\Include\token.h">
<Filter>Include</Filter>
</ClInclude>
<ClInclude Include="..\Include\traceback.h">
<Filter>Include</Filter>
</ClInclude>
@@ -633,6 +630,9 @@
<ClInclude Include="..\Include\internal\pycore_symtable.h">
<Filter>Include\internal</Filter>
</ClInclude>
<ClInclude Include="..\Include\internal\pycore_token.h">
<Filter>Include\internal</Filter>
</ClInclude>
<ClInclude Include="..\Include\internal\pycore_traceback.h">
<Filter>Include\internal</Filter>
</ClInclude>
2 changes: 1 addition & 1 deletion PCbuild/regen.targets
@@ -19,7 +19,7 @@
<_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc">
<Format>rst</Format>
</_TokenOutputs>
<_TokenOutputs Include="$(PySourcePath)Include\token.h">
<_TokenOutputs Include="$(PySourcePath)Include\internal\pycore_token.h">
<Format>h</Format>
</_TokenOutputs>
<_TokenOutputs Include="$(PySourcePath)Parser\token.c">
2 changes: 1 addition & 1 deletion Parser/pegen.h
@@ -3,8 +3,8 @@

#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <token.h>
#include <pycore_ast.h>
#include <pycore_token.h>

#if 0
#define PyPARSE_YIELD_IS_KEYWORD 0x0001
8 changes: 4 additions & 4 deletions Parser/token.c

(The Parser/token.c diff is not rendered by default because the file is auto-generated.)

6 changes: 3 additions & 3 deletions Parser/tokenizer.c
@@ -1992,10 +1992,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
/* Check for two-character token */
{
int c2 = tok_nextc(tok);
int token = PyToken_TwoChars(c, c2);
int token = _PyToken_TwoChars(c, c2);
if (token != OP) {
int c3 = tok_nextc(tok);
int token3 = PyToken_ThreeChars(c, c2, c3);
int token3 = _PyToken_ThreeChars(c, c2, c3);
if (token3 != OP) {
token = token3;
}
@@ -2059,7 +2059,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
/* Punctuation character */
*p_start = tok->start;
*p_end = tok->cur;
return PyToken_OneChar(c);
return _PyToken_OneChar(c);
}

int
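
The dispatch pattern these renamed helpers implement (try the two-character lookup, then prefer a three-character match) is shown in the standalone sketch below; the two lookup functions are simplified stand-ins for the generated _PyToken_TwoChars()/_PyToken_ThreeChars(), covering only the '**=' case:

/* demo_token_dispatch.c -- standalone illustration, not CPython source.
 * Mirrors the fallback order used by tok_get() above: a two-character
 * token is upgraded to a three-character token when one matches. */
#include <stdio.h>

enum { OP, DOUBLESTAR, DOUBLESTAREQUAL };           /* stand-ins for the real token ids */

static int two_chars(int c1, int c2)                /* simplified _PyToken_TwoChars() */
{
    return (c1 == '*' && c2 == '*') ? DOUBLESTAR : OP;
}

static int three_chars(int c1, int c2, int c3)      /* simplified _PyToken_ThreeChars() */
{
    return (c1 == '*' && c2 == '*' && c3 == '=') ? DOUBLESTAREQUAL : OP;
}

int main(void)
{
    const char *src = "**=";
    int token = two_chars(src[0], src[1]);          /* try the 2-char token first */
    if (token != OP) {
        int token3 = three_chars(src[0], src[1], src[2]);
        if (token3 != OP) {
            token = token3;                         /* prefer the longer match */
        }
    }
    printf("'%s' -> %s\n", src,
           token == DOUBLESTAREQUAL ? "DOUBLESTAREQUAL" : "DOUBLESTAR");
    return 0;
}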
2 changes: 1 addition & 1 deletion Parser/tokenizer.h
@@ -8,7 +8,7 @@ extern "C" {

/* Tokenizer interface */

#include "token.h" /* For token types */
#include "pycore_token.h" /* For token types */

#define MAXINDENT 100 /* Max indentation level */
#define MAXLEVEL 200 /* Max parentheses level */
1 change: 0 additions & 1 deletion Python/pythonrun.c
@@ -24,7 +24,6 @@
#include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_traceback.h" // _PyTraceBack_Print_Indented()

#include "token.h" // INDENT
#include "errcode.h" // E_EOF
#include "marshal.h" // PyMarshal_ReadLongFromFile()

29 changes: 16 additions & 13 deletions Tools/scripts/generate_token.py
@@ -51,13 +51,16 @@ def update_file(file, content):
/* Auto-generated by Tools/scripts/generate_token.py */

/* Token types */
#ifndef Py_LIMITED_API
#ifndef Py_TOKEN_H
#define Py_TOKEN_H
#ifndef Py_INTERNAL_TOKEN_H
#define Py_INTERNAL_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
# error "this header requires Py_BUILD_CORE define"
#endif

#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */

%s\
@@ -75,19 +78,19 @@ def update_file(file, content):
(x) == DEDENT)


// Symbols exported for test_peg_generator
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
PyAPI_FUNC(int) _PyToken_OneChar(int);
PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);

#ifdef __cplusplus
}
#endif
#endif /* !Py_TOKEN_H */
#endif /* Py_LIMITED_API */
#endif // !Py_INTERNAL_TOKEN_H
"""

def make_h(infile, outfile='Include/token.h'):
def make_h(infile, outfile='Include/internal/pycore_token.h'):
tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)

defines = []
@@ -106,7 +109,7 @@ def make_h(infile, outfile='Include/token.h'):
/* Auto-generated by Tools/scripts/generate_token.py */

#include "Python.h"
#include "token.h"
#include "pycore_token.h"

/* Token names */

Expand All @@ -117,21 +120,21 @@ def make_h(infile, outfile='Include/token.h'):
/* Return the token corresponding to a single character */

int
PyToken_OneChar(int c1)
_PyToken_OneChar(int c1)
{
%s\
return OP;
}

int
PyToken_TwoChars(int c1, int c2)
_PyToken_TwoChars(int c1, int c2)
{
%s\
return OP;
}

int
PyToken_ThreeChars(int c1, int c2, int c3)
_PyToken_ThreeChars(int c1, int c2, int c3)
{
%s\
return OP;