Issue #11074: Fix 'tokenize' so it can be reloaded.

The module stored away the 'open' object as found in its global namespace
(where the lookup fell through to the built-in namespace), since the module
defines its own 'open'. The problem is that reloading the module made it grab
the 'open' defined by the previous load instead, leading to code that recursed
infinitely. Switched to calling builtins.open directly.
Brett Cannon 2011-02-22 03:25:12 +00:00
parent eeb114b028
commit f3042782af
2 changed files with 4 additions and 3 deletions
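
The failure mode is easy to reproduce outside of tokenize. Below is a minimal
sketch, assuming a throwaway module named shadow.py that copies the pre-patch
pattern; the file name, the abridged session, and the use of importlib.reload
are illustrative, not part of this commit.

# shadow.py -- hypothetical stand-in for the pre-patch pattern in Lib/tokenize.py
_builtin_open = open          # on first import, 'open' falls through to builtins

def open(filename):
    """Shadowing wrapper, analogous to tokenize.open()."""
    return _builtin_open(filename, 'rb')

# Abridged interactive session, started in the directory containing shadow.py:
>>> import importlib, shadow
>>> shadow.open('shadow.py').close()   # works: _builtin_open is builtins.open
>>> shadow = importlib.reload(shadow)  # the body re-runs in the existing namespace,
>>> #   so '_builtin_open = open' now grabs the wrapper left by the previous load
>>> shadow.open('shadow.py')           # the wrapper ends up calling itself
RecursionError: maximum recursion depth exceeded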

Lib/tokenize.py

@@ -24,6 +24,7 @@ __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
                'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
                'Michael Foord')
+import builtins
 import re
 import sys
 from token import *
@@ -335,13 +336,11 @@ def detect_encoding(readline):
     return default, [first, second]
 
 
-_builtin_open = open
-
 def open(filename):
     """Open a file in read only mode using the encoding detected by
     detect_encoding().
     """
-    buffer = _builtin_open(filename, 'rb')
+    buffer = builtins.open(filename, 'rb')
     encoding, lines = detect_encoding(buffer.readline)
     buffer.seek(0)
     text = TextIOWrapper(buffer, encoding, line_buffering=True)
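
After the patch, open() resolves the builtin through the builtins module at
call time instead of capturing a reference at import time, so reloading
tokenize can no longer poison it. A small sketch of the intended behaviour,
assuming it is run from the top of a CPython checkout (the path is an
assumption, not part of the commit):

# sketch: tokenize.open() keeps working across a reload of the module
import importlib
import tokenize

with tokenize.open('Lib/tokenize.py') as f:   # text stream decoded per detect_encoding()
    first_line = f.readline()

importlib.reload(tokenize)                    # pre-patch, this is what broke open()
with tokenize.open('Lib/tokenize.py') as f:   # post-patch, builtins.open is looked up
    assert f.readline() == first_line         # on every call, so this still succeeds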