SF bug #1224621: tokenize module does not detect inconsistent dedents

parent 8fa7eb563b
commit da99d1cbfe

3 changed files with 25 additions and 1 deletion
Lib/test/test_tokenize.py
@@ -1,4 +1,4 @@
-from test.test_support import verbose, findfile, is_resource_enabled
+from test.test_support import verbose, findfile, is_resource_enabled, TestFailed
 import os, glob, random
 from tokenize import (tokenize, generate_tokens, untokenize,
                       NUMBER, NAME, OP, STRING)
@@ -41,6 +41,24 @@ for f in testfiles:
     test_roundtrip(f)
 
 
+###### Test detecton of IndentationError ######################
+
+from cStringIO import StringIO
+
+sampleBadText = """
+def foo():
+    bar
+  baz
+"""
+
+try:
+    for tok in generate_tokens(StringIO(sampleBadText).readline):
+        pass
+except IndentationError:
+    pass
+else:
+    raise TestFailed("Did not detect IndentationError:")
+
+
 ###### Test example in the docs ###############################
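The new test drives generate_tokens() over a snippet whose final dedent (the two-space "  baz") matches neither of the enclosing indentation levels (0 or 4). As a rough sketch of the same check on present-day Python 3 (an assumption; this commit targets the Python 2 code base, and the snippet below is illustrative), io.StringIO stands in for cStringIO and the error can simply be printed:

    import io
    import tokenize

    bad = "def foo():\n    bar\n  baz\n"   # dedent to column 2 matches no level
    try:
        for tok in tokenize.generate_tokens(io.StringIO(bad).readline):
            pass
    except IndentationError as err:
        print(err)   # unindent does not match any outer indentation level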
Lib/tokenize.py
@@ -271,6 +271,9 @@ def generate_tokens(readline):
                 indents.append(column)
                 yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
             while column < indents[-1]:
+                if column not in indents:
+                    raise IndentationError(
+                        "unindent does not match any outer indentation level")
                 indents = indents[:-1]
                 yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
 
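generate_tokens() keeps a stack of indentation columns (indents): an INDENT pushes a new column, and every DEDENT must pop back to a column that is already on the stack. The added check raises IndentationError as soon as a dedent lands between two recorded levels. A minimal sketch of that bookkeeping in isolation follows; the helper name check_dedents and its input are illustrative, not part of the module:

    def check_dedents(columns):
        """columns: indentation width of each successive logical line."""
        indents = [0]
        for column in columns:
            if column > indents[-1]:        # deeper: record a new level
                indents.append(column)
            while column < indents[-1]:     # shallower: pop until it matches
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level")
                indents = indents[:-1]

    check_dedents([0, 4, 0])    # fine: dedents back to an existing level
    check_dedents([0, 4, 2])    # raises IndentationError, like "  baz" above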
Misc/NEWS
@@ -147,6 +147,9 @@ Extension Modules
 Library
 -------
 
+- The tokenize module now detects and reports indentation errors.
+  Bug #1224621.
+
 - The tokenize module has a new untokenize() function to support a full
   roundtrip from lexed tokens back to Python sourcecode.  In addition,
   the generate_tokens() function now accepts a callable argument that
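The round trip mentioned in that last entry can be exercised directly. A small sketch in the Python 2 idiom of this commit (cStringIO, .next; the source string is illustrative): the guarantee is that the regenerated text tokenizes back to the same (type, string) pairs, not that it is byte-for-byte identical to the original.

    from cStringIO import StringIO
    from tokenize import generate_tokens, untokenize

    source = "x = 3 * (4 + 5)\n"
    t1 = [tok[:2] for tok in generate_tokens(StringIO(source).readline)]
    newtext = untokenize(t1)                      # rebuild source from tokens
    readline = iter(newtext.splitlines(1)).next   # feed it back line by line
    t2 = [tok[:2] for tok in generate_tokens(readline)]
    assert t1 == t2                               # same token stream both times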