Close the file after tokenizing it. Because the open file object was
bound to a module global, the file object remained open throughout
the test suite run.
Tim Peters 2003-05-12 19:29:36 +00:00
parent 12d55a7caa
commit 11cb813598


@@ -3,7 +3,10 @@ import tokenize, os, sys
 if verbose:
     print 'starting...'
-file = open(findfile('tokenize_tests'+os.extsep+'py'))
-tokenize.tokenize(file.readline)
+f = file(findfile('tokenize_tests'+os.extsep+'py'))
+tokenize.tokenize(f.readline)
+f.close()
 if verbose:
     print 'finished'
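
For context, the pattern behind the change restated as a minimal, self-contained sketch. It targets modern Python 3 rather than the Python 2 of the diff, and the helper name tokenize_file plus the use of __file__ as the input file are illustrative assumptions, not part of the commit:

    import tokenize

    # Leak pattern: binding the open file object to a module-level name means
    # nothing ever closes it, so the handle stays open for the lifetime of the
    # module -- in a test module, that is the whole test-suite run.
    leaky_handle = open(__file__, 'rb')
    list(tokenize.tokenize(leaky_handle.readline))  # handle is never closed

    # Shape of the fix: keep the handle in a local name and close it as soon
    # as tokenizing is done.  'tokenize_file' is a hypothetical helper name.
    def tokenize_file(path):
        f = open(path, 'rb')
        try:
            return list(tokenize.tokenize(f.readline))
        finally:
            f.close()

    if __name__ == '__main__':
        print('%d tokens' % len(tokenize_file(__file__)))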