Mirror of https://github.com/python/cpython.git
Synced 2025-07-07 19:35:27 +00:00
bpo-30377: Simplify handling of COMMENT and NL in tokenize.py (#1607)
parent a17a2f52c4
commit c471ca448c

2 changed files with 12 additions and 12 deletions
Lib/tokenize.py

@@ -560,13 +560,11 @@ def _tokenize(readline, encoding):
             if line[pos] in '#\r\n':           # skip comments or blank lines
                 if line[pos] == '#':
                     comment_token = line[pos:].rstrip('\r\n')
-                    nl_pos = pos + len(comment_token)
                     yield TokenInfo(COMMENT, comment_token,
                            (lnum, pos), (lnum, pos + len(comment_token)), line)
-                    yield TokenInfo(NL, line[nl_pos:],
-                           (lnum, nl_pos), (lnum, len(line)), line)
-                else:
-                    yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
-                           (lnum, pos), (lnum, len(line)), line)
+                    pos += len(comment_token)
+
+                yield TokenInfo(NL, line[pos:],
+                       (lnum, pos), (lnum, len(line)), line)
                 continue

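The simplification does not change what the tokenizer emits: a comment line still produces a COMMENT token followed by an NL token, and a blank line produces a single NL token. A minimal sketch of observing this with the tokenize module (the source string below is made up for illustration):

import io
import tokenize

# A comment line, a blank line, then a simple statement.
source = "# a comment\n\nx = 1\n"

for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))

# Expected output (roughly):
#   COMMENT '# a comment'
#   NL '\n'
#   NL '\n'
#   NAME 'x'
#   OP '='
#   NUMBER '1'
#   NEWLINE '\n'
#   ENDMARKER ''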