mirror of
https://github.com/python/cpython.git
synced 2025-08-04 17:08:35 +00:00
SF 633560: tokenize.__all__ needs "generate_tokens"
This commit is contained in:
parent
a326f47a13
commit
78a7aeeb1a
1 changed files with 2 additions and 1 deletions
```diff
@@ -30,7 +30,8 @@ import string, re
 from token import *

 import token
-__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
+__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
+           "generate_tokens", "NL"]
 del x
 del token
```
|
Loading…
Add table
Add a link
Reference in a new issue