Mirror of https://github.com/python/cpython.git (synced 2025-07-07 19:35:27 +00:00)
bpo-12486: Document tokenize.generate_tokens() as public API (#6957)
* Document tokenize.generate_tokens()
* Add news file
* Add test for generate_tokens
* Document behaviour around ENCODING token
* Add generate_tokens to __all__
Parent: c2745d2d05
Commit: c56b17bd8c
4 changed files with 35 additions and 6 deletions
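The commit promotes an API the standard library had long used internally. A minimal usage sketch (illustrative, not part of the commit): generate_tokens() consumes str lines, so it pairs with io.StringIO rather than a binary stream; the source string here is made up for the example.

import io
import tokenize

source = "x = 1 + 2\n"

# Each result is a TokenInfo namedtuple: (type, string, start, end, line).
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))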
Lib/tokenize.py

@@ -37,7 +37,7 @@ cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
 blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
 
 import token
-__all__ = token.__all__ + ["tokenize", "detect_encoding",
+__all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
                            "untokenize", "TokenInfo"]
 del token
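Because generate_tokens is now listed in __all__, a star-import of tokenize re-exports it as well. A quick illustrative check (the "pass\n" source is an arbitrary example):

import io
from tokenize import *  # generate_tokens is now included in __all__

tokens = list(generate_tokens(io.StringIO("pass\n").readline))
print(tokens[0])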
@@ -653,9 +653,12 @@ def _tokenize(readline, encoding):
     yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
 
 
-# An undocumented, backwards compatible, API for all the places in the standard
-# library that expect to be able to use tokenize with strings
 def generate_tokens(readline):
+    """Tokenize a source reading Python code as unicode strings.
+
+    This has the same API as tokenize(), except that it expects the *readline*
+    callable to return str objects instead of bytes.
+    """
     return _tokenize(readline, None)
 
 def main():
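The new docstring pins down the one behavioural difference, and the commit's "behaviour around ENCODING token" note follows from the body shown above: generate_tokens() calls _tokenize(readline, None), and with no encoding there is no ENCODING token to emit. A small sketch contrasting the two entry points (the source string is invented for the example):

import io
import tokenize

source = "x = 1\n"

# tokenize() reads bytes and yields an ENCODING token first ...
from_bytes = list(tokenize.tokenize(io.BytesIO(source.encode("utf-8")).readline))
assert from_bytes[0].type == tokenize.ENCODING

# ... while generate_tokens() reads str and never yields ENCODING,
# so the first token here is the NAME token for "x".
from_str = list(tokenize.generate_tokens(io.StringIO(source).readline))
assert from_str[0].type == tokenize.NAME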