[3.12] gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265) (#109677)

* gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265)

(cherry picked from commit 1110c5bc82)

* gh-108303: Add `Lib/test/tokenizedata` to `TESTSUBDIRS` (#109314)

(cherry picked from commit 42ab2cbd7b)

---------

Co-authored-by: Nikita Sobolev <mail@sobolevn.me>
This commit is contained in:
Victor Stinner 2023-10-02 17:11:24 +02:00 committed by GitHub
parent 9bceb8a79b
commit ed4ffd7404
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 41 additions and 25 deletions

View file

@@ -1198,7 +1198,7 @@ class TestTokenizerAdheresToPep0263(TestCase):
"""
def _testFile(self, filename):
-        path = os.path.join(os.path.dirname(__file__), filename)
+        path = os.path.join(os.path.dirname(__file__), 'tokenizedata', filename)
with open(path, 'rb') as f:
TestRoundtrip.check_roundtrip(self, f)
@@ -1791,7 +1791,7 @@ class TestRoundtrip(TestCase):
self.check_roundtrip("if x == 1 : \n"
" print(x)\n")
-        fn = support.findfile("tokenize_tests.txt")
+        fn = support.findfile("tokenize_tests.txt", subdir="tokenizedata")
with open(fn, 'rb') as f:
self.check_roundtrip(f)
self.check_roundtrip("if x == 1:\n"
@@ -1846,8 +1846,7 @@ class TestRoundtrip(TestCase):
# pass the '-ucpu' option to process the full directory.
import glob, random
-        fn = support.findfile("tokenize_tests.txt")
-        tempdir = os.path.dirname(fn) or os.curdir
+        tempdir = os.path.dirname(__file__) or os.curdir
testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
# Tokenize is broken on test_pep3131.py because regular expressions are