gh-71299: Fix __all__ in tokenize (#105907)

Co-authored-by: Unit03
This commit is contained in:
Lysandros Nikolaou 2023-06-19 13:31:57 +02:00 committed by GitHub
parent 581619941e
commit ab3823a97b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 137 additions and 135 deletions

View file

@@ -41,7 +41,7 @@ blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
import token
__all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
-                           "untokenize", "TokenInfo"]
+                           "untokenize", "TokenInfo", "open", "TokenError"]
del token
class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
@@ -162,8 +162,6 @@ tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
class Untokenizer:
def __init__(self):