Commit 8d99502

vstinner and sobolevn authored
[3.11] gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265) (#109678)
* gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265)
  (cherry picked from commit 1110c5b)

* gh-108303: Add `Lib/test/tokenizedata` to `TESTSUBDIRS` (#109314)
  (cherry picked from commit 42ab2cb)

Co-authored-by: Nikita Sobolev <[email protected]>
1 parent: 37b2617 · commit: 8d99502

18 files changed: +40 −24 lines
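
Every hunk below makes the same change: fixture paths gain a `tokenizedata` component, either by inserting it into an `os.path.join()` call or by passing `subdir='tokenizedata'` to `test.support.findfile()`. A minimal sketch of the two lookup styles (the fixture names are taken from the diff; everything else is illustrative):

    import os
    from test import support

    # Style 1: build the path relative to the test module itself
    # (used by test_py_compile.py and test_source_encoding.py below).
    path = os.path.join(os.path.dirname(__file__),
                        'tokenizedata', 'bad_coding2.py')

    # Style 2: let test.support.findfile() search its data directories;
    # it accepts a subdir keyword for exactly this purpose (used by
    # test_tarfile.py, test_tokenize.py and test_reindent.py below).
    fn = support.findfile('tokenize_tests.txt', subdir='tokenizedata')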

.gitattributes (+1 −1)

@@ -25,7 +25,7 @@ PC/classicAppCompat.* binary
 [attr]noeol -text

 Lib/test/cjkencodings/* noeol
-Lib/test/coding20731.py noeol
+Lib/test/tokenizedata/coding20731.py noeol
 Lib/test/decimaltestdata/*.decTest noeol
 Lib/test/test_email/data/*.txt noeol
 Lib/test/test_importlib/data01/* noeol

Lib/test/test_py_compile.py (+12 −4)

@@ -132,7 +132,9 @@ def test_exceptions_propagate(self):
             os.chmod(self.directory, mode.st_mode)

     def test_bad_coding(self):
-        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+        bad_coding = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'bad_coding2.py')
         with support.captured_stderr():
             self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
         self.assertFalse(os.path.exists(
@@ -195,7 +197,9 @@ def test_invalidation_mode(self):
         self.assertEqual(flags, 0b1)

     def test_quiet(self):
-        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+        bad_coding = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'bad_coding2.py')
         with support.captured_stderr() as stderr:
             self.assertIsNone(py_compile.compile(bad_coding, doraise=False, quiet=2))
             self.assertIsNone(py_compile.compile(bad_coding, doraise=True, quiet=2))
@@ -259,14 +263,18 @@ def test_with_files(self):
         self.assertTrue(os.path.exists(self.cache_path))

     def test_bad_syntax(self):
-        bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
+        bad_syntax = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'badsyntax_3131.py')
         rc, stdout, stderr = self.pycompilecmd_failure(bad_syntax)
         self.assertEqual(rc, 1)
         self.assertEqual(stdout, b'')
         self.assertIn(b'SyntaxError', stderr)

     def test_bad_syntax_with_quiet(self):
-        bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
+        bad_syntax = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'badsyntax_3131.py')
         rc, stdout, stderr = self.pycompilecmd_failure('-q', bad_syntax)
         self.assertEqual(rc, 1)
         self.assertEqual(stdout, b'')
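
For context on what these tests assert: `py_compile.compile()` reports failures differently depending on `doraise` and `quiet`. A small self-contained sketch, where a deliberately bogus coding cookie stands in for the real `bad_coding2.py` fixture:

    import os
    import py_compile
    import tempfile

    # Stand-in for the bad_coding2.py fixture: the cookie names an
    # encoding that does not exist, so compilation must fail.
    bad = os.path.join(tempfile.mkdtemp(), 'bad_coding2.py')
    with open(bad, 'w') as f:
        f.write('# -*- coding: no-such-encoding -*-\nprint("hi")\n')

    # doraise=False: the error goes to sys.stderr and None is returned.
    assert py_compile.compile(bad, doraise=False) is None

    # doraise=True raises PyCompileError; quiet=2 suppresses all output.
    try:
        py_compile.compile(bad, doraise=True, quiet=2)
    except py_compile.PyCompileError:
        pass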

Lib/test/test_source_encoding.py (+3 −2)

@@ -69,6 +69,7 @@ def test_issue7820(self):
     def test_20731(self):
         sub = subprocess.Popen([sys.executable,
                         os.path.join(os.path.dirname(__file__),
+                                     'tokenizedata',
                                      'coding20731.py')],
                         stderr=subprocess.PIPE)
         err = sub.communicate()[1]
@@ -101,10 +102,10 @@ def test_bad_coding2(self):
         self.verify_bad_module(module_name)

     def verify_bad_module(self, module_name):
-        self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
+        self.assertRaises(SyntaxError, __import__, 'test.tokenizedata.' + module_name)

         path = os.path.dirname(__file__)
-        filename = os.path.join(path, module_name + '.py')
+        filename = os.path.join(path, 'tokenizedata', module_name + '.py')
         with open(filename, "rb") as fp:
             bytes = fp.read()
         self.assertRaises(SyntaxError, compile, bytes, filename, 'exec')
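
The final assertion of `verify_bad_module` works because `compile()` honours the PEP 263 coding cookie when handed raw bytes. Standalone, with a deliberately bogus cookie rather than one of the real fixtures:

    # compile() decodes bytes using the declared coding cookie and
    # raises SyntaxError when the declared encoding is unknown.
    source = b'# -*- coding: no-such-encoding -*-\nx = 1\n'
    try:
        compile(source, '<fixture>', 'exec')
    except SyntaxError as err:
        print(err)  # unknown encoding: no-such-encoding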

Lib/test/test_tarfile.py (+18 −11)

@@ -2493,16 +2493,17 @@ def tarfilecmd_failure(self, *args):
         return script_helper.assert_python_failure('-m', 'tarfile', *args)

     def make_simple_tarfile(self, tar_name):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         self.addCleanup(os_helper.unlink, tar_name)
         with tarfile.open(tar_name, 'w') as tf:
             for tardata in files:
                 tf.add(tardata, arcname=os.path.basename(tardata))

     def make_evil_tarfile(self, tar_name):
-        files = [support.findfile('tokenize_tests.txt')]
         self.addCleanup(os_helper.unlink, tar_name)
         with tarfile.open(tar_name, 'w') as tf:
             benign = tarfile.TarInfo('benign')
@@ -2583,9 +2584,11 @@ def test_list_command_invalid_file(self):
         self.assertEqual(rc, 1)

     def test_create_command(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for opt in '-c', '--create':
             try:
                 out = self.tarfilecmd(opt, tmpname, *files)
@@ -2596,9 +2599,11 @@ def test_create_command(self):
             os_helper.unlink(tmpname)

     def test_create_command_verbose(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for opt in '-v', '--verbose':
             try:
                 out = self.tarfilecmd(opt, '-c', tmpname, *files,
@@ -2610,7 +2615,7 @@ def test_create_command_verbose(self):
             os_helper.unlink(tmpname)

     def test_create_command_dotless_filename(self):
-        files = [support.findfile('tokenize_tests.txt')]
+        files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
         try:
             out = self.tarfilecmd('-c', dotlessname, *files)
             self.assertEqual(out, b'')
@@ -2621,7 +2626,7 @@ def test_create_command_dotless_filename(self):

     def test_create_command_dot_started_filename(self):
         tar_name = os.path.join(TEMPDIR, ".testtar")
-        files = [support.findfile('tokenize_tests.txt')]
+        files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
         try:
             out = self.tarfilecmd('-c', tar_name, *files)
             self.assertEqual(out, b'')
@@ -2631,9 +2636,11 @@ def test_create_command_dot_started_filename(self):
             os_helper.unlink(tar_name)

     def test_create_command_compressed(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for filetype in (GzipTest, Bz2Test, LzmaTest):
             if not filetype.open:
                 continue
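
`make_simple_tarfile()` above just archives the relocated fixtures under their bare names. The same pattern in isolation (the tar file name here is chosen for illustration):

    import os
    import tarfile
    from test import support

    files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]

    # Archiving with arcname=basename keeps the tar contents independent
    # of where the source checkout happens to live.
    with tarfile.open('simple.tar', 'w') as tf:
        for tardata in files:
            tf.add(tardata, arcname=os.path.basename(tardata))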

Lib/test/test_tokenize.py (+3 −4)

@@ -999,7 +999,7 @@ class TestTokenizerAdheresToPep0263(TestCase):
     """

     def _testFile(self, filename):
-        path = os.path.join(os.path.dirname(__file__), filename)
+        path = os.path.join(os.path.dirname(__file__), 'tokenizedata', filename)
         TestRoundtrip.check_roundtrip(self, open(path, 'rb'))

     def test_utf8_coding_cookie_and_no_utf8_bom(self):
@@ -1560,7 +1560,7 @@ def test_roundtrip(self):

         self.check_roundtrip("if x == 1 : \n"
                              "  print(x)\n")
-        fn = support.findfile("tokenize_tests.txt")
+        fn = support.findfile("tokenize_tests.txt", subdir="tokenizedata")
         with open(fn, 'rb') as f:
             self.check_roundtrip(f)
         self.check_roundtrip("if x == 1:\n"
@@ -1615,8 +1615,7 @@ def test_random_files(self):
         # pass the '-ucpu' option to process the full directory.

         import glob, random
-        fn = support.findfile("tokenize_tests.txt")
-        tempdir = os.path.dirname(fn) or os.curdir
+        tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))

         # Tokenize is broken on test_pep3131.py because regular expressions are
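
`check_roundtrip()` (defined elsewhere in this file) asserts that tokenize → untokenize → tokenize is stable. A sketch of that invariant against the moved fixture, comparing (type, string) pairs the way the real helper does:

    import io
    from test import support
    from tokenize import tokenize, untokenize

    fn = support.findfile('tokenize_tests.txt', subdir='tokenizedata')
    with open(fn, 'rb') as f:
        source = f.read()

    toks = list(tokenize(io.BytesIO(source).readline))
    restored = untokenize(toks)

    # untokenize() does not promise byte-identical output, only output
    # that tokenizes equivalently, so compare (type, string) pairs.
    before = [(t.type, t.string) for t in toks]
    after = [(t.type, t.string)
             for t in tokenize(io.BytesIO(restored).readline)]
    assert before == after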

Lib/test/test_tools/test_reindent.py (+1 −1)

@@ -25,7 +25,7 @@ def test_help(self):
         self.assertGreater(err, b'')

     def test_reindent_file_with_bad_encoding(self):
-        bad_coding_path = findfile('bad_coding.py')
+        bad_coding_path = findfile('bad_coding.py', subdir='tokenizedata')
         rc, out, err = assert_python_ok(self.script, '-r', bad_coding_path)
         self.assertEqual(out, b'')
         self.assertNotEqual(err, b'')

Lib/test/test_unicode_identifiers.py (+1 −1)

@@ -19,7 +19,7 @@ def test_non_bmp_normalized(self):

     def test_invalid(self):
         try:
-            from test import badsyntax_3131
+            from test.tokenizedata import badsyntax_3131
         except SyntaxError as err:
             self.assertEqual(str(err),
                 "invalid character '€' (U+20AC) (badsyntax_3131.py, line 2)")

Lib/test/tokenizedata/__init__.py

Whitespace-only changes.

4 files renamed without changes.

Makefile.pre.in (+1 −0)

@@ -2043,6 +2043,7 @@ TESTSUBDIRS= ctypes/test \
 		test/test_warnings/data \
 		test/test_zoneinfo \
 		test/test_zoneinfo/data \
+		test/tokenizedata \
 		test/tracedmodules \
 		test/typinganndata \
 		test/xmltestdata \
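
TESTSUBDIRS lists the test subdirectories that `make install` copies; without this entry the new package would be missing from installed builds. A quick sanity check against an installation that includes this commit (assuming, as the rename suggests, that the data files sit next to the package's `__init__.py`):

    import os
    import test.tokenizedata

    # The new __init__.py makes the directory importable; list what was
    # actually installed alongside it.
    data_dir = os.path.dirname(test.tokenizedata.__file__)
    print(sorted(os.listdir(data_dir)))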
