gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265)

This commit is contained in:
Nikita Sobolev 2023-09-12 09:37:42 +03:00 committed by GitHub
parent 8c813faf86
commit 1110c5bc82
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 40 additions and 25 deletions

View file

@@ -1200,7 +1200,7 @@ class TestTokenizerAdheresToPep0263(TestCase):
"""
def _testFile(self, filename):
-        path = os.path.join(os.path.dirname(__file__), filename)
+        path = os.path.join(os.path.dirname(__file__), 'tokenizedata', filename)
with open(path, 'rb') as f:
TestRoundtrip.check_roundtrip(self, f)
@@ -1794,7 +1794,7 @@ class TestRoundtrip(TestCase):
self.check_roundtrip("if x == 1 : \n"
" print(x)\n")
-        fn = support.findfile("tokenize_tests.txt")
+        fn = support.findfile("tokenize_tests.txt", subdir="tokenizedata")
with open(fn, 'rb') as f:
self.check_roundtrip(f)
self.check_roundtrip("if x == 1:\n"
@@ -1849,8 +1849,7 @@ class TestRoundtrip(TestCase):
# pass the '-ucpu' option to process the full directory.
import glob, random
-        fn = support.findfile("tokenize_tests.txt")
-        tempdir = os.path.dirname(fn) or os.curdir
+        tempdir = os.path.dirname(__file__) or os.curdir
testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
# Tokenize is broken on test_pep3131.py because regular expressions are