Issue #10386: Added __all__ to token module; this simplifies importing
in tokenize module and prevents leaking of private names through
import *.
This commit is contained in:
Alexander Belopolsky 2010-11-11 14:07:41 +00:00
parent bb27c128a5
commit b9d10d08c4
3 changed files with 14 additions and 13 deletions

View file

@@ -33,9 +33,8 @@ from io import TextIOWrapper
cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
import token
__all__ = [x for x in dir(token) if not x.startswith("_")]
__all__.extend(["COMMENT", "tokenize", "detect_encoding", "NL", "untokenize",
"ENCODING", "TokenInfo"])
__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
"NL", "untokenize", "ENCODING", "TokenInfo"]
del token
COMMENT = N_TOKENS