# Tests for the compileall module (test_compileall.py).
  1. import compileall
  2. import contextlib
  3. import filecmp
  4. import importlib.util
  5. import io
  6. import os
  7. import pathlib
  8. import py_compile
  9. import shutil
  10. import struct
  11. import sys
  12. import tempfile
  13. import test.test_importlib.util
  14. import time
  15. import unittest
  16. from unittest import mock, skipUnless
  17. try:
  18. # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
  19. # and it can function.
  20. from concurrent.futures import ProcessPoolExecutor
  21. from concurrent.futures.process import _check_system_limits
  22. _check_system_limits()
  23. _have_multiprocessing = True
  24. except (NotImplementedError, ModuleNotFoundError):
  25. _have_multiprocessing = False
  26. from test import support
  27. from test.support import os_helper
  28. from test.support import script_helper
  29. from test.test_py_compile import without_source_date_epoch
  30. from test.test_py_compile import SourceDateEpochTestMeta
  31. def get_pyc(script, opt):
  32. if not opt:
  33. # Replace None and 0 with ''
  34. opt = ''
  35. return importlib.util.cache_from_source(script, optimization=opt)
  36. def get_pycs(script):
  37. return [get_pyc(script, opt) for opt in (0, 1, 2)]
  38. def is_hardlink(filename1, filename2):
  39. """Returns True if two files have the same inode (hardlink)"""
  40. inode1 = os.stat(filename1).st_ino
  41. inode2 = os.stat(filename2).st_ino
  42. return inode1 == inode2
  43. class CompileallTestsBase:
  44. def setUp(self):
  45. self.directory = tempfile.mkdtemp()
  46. self.source_path = os.path.join(self.directory, '_test.py')
  47. self.bc_path = importlib.util.cache_from_source(self.source_path)
  48. with open(self.source_path, 'w', encoding="utf-8") as file:
  49. file.write('x = 123\n')
  50. self.source_path2 = os.path.join(self.directory, '_test2.py')
  51. self.bc_path2 = importlib.util.cache_from_source(self.source_path2)
  52. shutil.copyfile(self.source_path, self.source_path2)
  53. self.subdirectory = os.path.join(self.directory, '_subdir')
  54. os.mkdir(self.subdirectory)
  55. self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
  56. shutil.copyfile(self.source_path, self.source_path3)
  57. def tearDown(self):
  58. shutil.rmtree(self.directory)
  59. def add_bad_source_file(self):
  60. self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
  61. with open(self.bad_source_path, 'w', encoding="utf-8") as file:
  62. file.write('x (\n')
  63. def timestamp_metadata(self):
  64. with open(self.bc_path, 'rb') as file:
  65. data = file.read(12)
  66. mtime = int(os.stat(self.source_path).st_mtime)
  67. compare = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 0,
  68. mtime & 0xFFFF_FFFF)
  69. return data, compare
  70. def test_year_2038_mtime_compilation(self):
  71. # Test to make sure we can handle mtimes larger than what a 32-bit
  72. # signed number can hold as part of bpo-34990
  73. try:
  74. os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
  75. except (OverflowError, OSError):
  76. self.skipTest("filesystem doesn't support timestamps near 2**32")
  77. with contextlib.redirect_stdout(io.StringIO()):
  78. self.assertTrue(compileall.compile_file(self.source_path))
  79. def test_larger_than_32_bit_times(self):
  80. # This is similar to the test above but we skip it if the OS doesn't
  81. # support modification times larger than 32-bits.
  82. try:
  83. os.utime(self.source_path, (2**35, 2**35))
  84. except (OverflowError, OSError):
  85. self.skipTest("filesystem doesn't support large timestamps")
  86. with contextlib.redirect_stdout(io.StringIO()):
  87. self.assertTrue(compileall.compile_file(self.source_path))
  88. def recreation_check(self, metadata):
  89. """Check that compileall recreates bytecode when the new metadata is
  90. used."""
  91. if os.environ.get('SOURCE_DATE_EPOCH'):
  92. raise unittest.SkipTest('SOURCE_DATE_EPOCH is set')
  93. py_compile.compile(self.source_path)
  94. self.assertEqual(*self.timestamp_metadata())
  95. with open(self.bc_path, 'rb') as file:
  96. bc = file.read()[len(metadata):]
  97. with open(self.bc_path, 'wb') as file:
  98. file.write(metadata)
  99. file.write(bc)
  100. self.assertNotEqual(*self.timestamp_metadata())
  101. compileall.compile_dir(self.directory, force=False, quiet=True)
  102. self.assertTrue(*self.timestamp_metadata())
  103. def test_mtime(self):
  104. # Test a change in mtime leads to a new .pyc.
  105. self.recreation_check(struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
  106. 0, 1))
  107. def test_magic_number(self):
  108. # Test a change in mtime leads to a new .pyc.
  109. self.recreation_check(b'\0\0\0\0')
  110. def test_compile_files(self):
  111. # Test compiling a single file, and complete directory
  112. for fn in (self.bc_path, self.bc_path2):
  113. try:
  114. os.unlink(fn)
  115. except:
  116. pass
  117. self.assertTrue(compileall.compile_file(self.source_path,
  118. force=False, quiet=True))
  119. self.assertTrue(os.path.isfile(self.bc_path) and
  120. not os.path.isfile(self.bc_path2))
  121. os.unlink(self.bc_path)
  122. self.assertTrue(compileall.compile_dir(self.directory, force=False,
  123. quiet=True))
  124. self.assertTrue(os.path.isfile(self.bc_path) and
  125. os.path.isfile(self.bc_path2))
  126. os.unlink(self.bc_path)
  127. os.unlink(self.bc_path2)
  128. # Test against bad files
  129. self.add_bad_source_file()
  130. self.assertFalse(compileall.compile_file(self.bad_source_path,
  131. force=False, quiet=2))
  132. self.assertFalse(compileall.compile_dir(self.directory,
  133. force=False, quiet=2))
  134. def test_compile_file_pathlike(self):
  135. self.assertFalse(os.path.isfile(self.bc_path))
  136. # we should also test the output
  137. with support.captured_stdout() as stdout:
  138. self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path)))
  139. self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)')
  140. self.assertTrue(os.path.isfile(self.bc_path))
  141. def test_compile_file_pathlike_ddir(self):
  142. self.assertFalse(os.path.isfile(self.bc_path))
  143. self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
  144. ddir=pathlib.Path('ddir_path'),
  145. quiet=2))
  146. self.assertTrue(os.path.isfile(self.bc_path))
  147. def test_compile_file_pathlike_stripdir(self):
  148. self.assertFalse(os.path.isfile(self.bc_path))
  149. self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
  150. stripdir=pathlib.Path('stripdir_path'),
  151. quiet=2))
  152. self.assertTrue(os.path.isfile(self.bc_path))
  153. def test_compile_file_pathlike_prependdir(self):
  154. self.assertFalse(os.path.isfile(self.bc_path))
  155. self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
  156. prependdir=pathlib.Path('prependdir_path'),
  157. quiet=2))
  158. self.assertTrue(os.path.isfile(self.bc_path))
  159. def test_compile_path(self):
  160. with test.test_importlib.util.import_state(path=[self.directory]):
  161. self.assertTrue(compileall.compile_path(quiet=2))
  162. with test.test_importlib.util.import_state(path=[self.directory]):
  163. self.add_bad_source_file()
  164. self.assertFalse(compileall.compile_path(skip_curdir=False,
  165. force=True, quiet=2))
  166. def test_no_pycache_in_non_package(self):
  167. # Bug 8563 reported that __pycache__ directories got created by
  168. # compile_file() for non-.py files.
  169. data_dir = os.path.join(self.directory, 'data')
  170. data_file = os.path.join(data_dir, 'file')
  171. os.mkdir(data_dir)
  172. # touch data/file
  173. with open(data_file, 'wb'):
  174. pass
  175. compileall.compile_file(data_file)
  176. self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))
  177. def test_compile_file_encoding_fallback(self):
  178. # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
  179. self.add_bad_source_file()
  180. with contextlib.redirect_stdout(io.StringIO()):
  181. self.assertFalse(compileall.compile_file(self.bad_source_path))
  182. def test_optimize(self):
  183. # make sure compiling with different optimization settings than the
  184. # interpreter's creates the correct file names
  185. optimize, opt = (1, 1) if __debug__ else (0, '')
  186. compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
  187. cached = importlib.util.cache_from_source(self.source_path,
  188. optimization=opt)
  189. self.assertTrue(os.path.isfile(cached))
  190. cached2 = importlib.util.cache_from_source(self.source_path2,
  191. optimization=opt)
  192. self.assertTrue(os.path.isfile(cached2))
  193. cached3 = importlib.util.cache_from_source(self.source_path3,
  194. optimization=opt)
  195. self.assertTrue(os.path.isfile(cached3))
  196. def test_compile_dir_pathlike(self):
  197. self.assertFalse(os.path.isfile(self.bc_path))
  198. with support.captured_stdout() as stdout:
  199. compileall.compile_dir(pathlib.Path(self.directory))
  200. line = stdout.getvalue().splitlines()[0]
  201. self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
  202. self.assertTrue(os.path.isfile(self.bc_path))
  203. def test_compile_dir_pathlike_stripdir(self):
  204. self.assertFalse(os.path.isfile(self.bc_path))
  205. self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
  206. stripdir=pathlib.Path('stripdir_path'),
  207. quiet=2))
  208. self.assertTrue(os.path.isfile(self.bc_path))
  209. def test_compile_dir_pathlike_prependdir(self):
  210. self.assertFalse(os.path.isfile(self.bc_path))
  211. self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
  212. prependdir=pathlib.Path('prependdir_path'),
  213. quiet=2))
  214. self.assertTrue(os.path.isfile(self.bc_path))
  215. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  216. @mock.patch('concurrent.futures.ProcessPoolExecutor')
  217. def test_compile_pool_called(self, pool_mock):
  218. compileall.compile_dir(self.directory, quiet=True, workers=5)
  219. self.assertTrue(pool_mock.called)
  220. def test_compile_workers_non_positive(self):
  221. with self.assertRaisesRegex(ValueError,
  222. "workers must be greater or equal to 0"):
  223. compileall.compile_dir(self.directory, workers=-1)
  224. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  225. @mock.patch('concurrent.futures.ProcessPoolExecutor')
  226. def test_compile_workers_cpu_count(self, pool_mock):
  227. compileall.compile_dir(self.directory, quiet=True, workers=0)
  228. self.assertEqual(pool_mock.call_args[1]['max_workers'], None)
  229. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  230. @mock.patch('concurrent.futures.ProcessPoolExecutor')
  231. @mock.patch('compileall.compile_file')
  232. def test_compile_one_worker(self, compile_file_mock, pool_mock):
  233. compileall.compile_dir(self.directory, quiet=True)
  234. self.assertFalse(pool_mock.called)
  235. self.assertTrue(compile_file_mock.called)
  236. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  237. @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None)
  238. @mock.patch('compileall.compile_file')
  239. def test_compile_missing_multiprocessing(self, compile_file_mock):
  240. compileall.compile_dir(self.directory, quiet=True, workers=5)
  241. self.assertTrue(compile_file_mock.called)
  242. def test_compile_dir_maxlevels(self):
  243. # Test the actual impact of maxlevels parameter
  244. depth = 3
  245. path = self.directory
  246. for i in range(1, depth + 1):
  247. path = os.path.join(path, f"dir_{i}")
  248. source = os.path.join(path, 'script.py')
  249. os.mkdir(path)
  250. shutil.copyfile(self.source_path, source)
  251. pyc_filename = importlib.util.cache_from_source(source)
  252. compileall.compile_dir(self.directory, quiet=True, maxlevels=depth - 1)
  253. self.assertFalse(os.path.isfile(pyc_filename))
  254. compileall.compile_dir(self.directory, quiet=True, maxlevels=depth)
  255. self.assertTrue(os.path.isfile(pyc_filename))
  256. def _test_ddir_only(self, *, ddir, parallel=True):
  257. """Recursive compile_dir ddir must contain package paths; bpo39769."""
  258. fullpath = ["test", "foo"]
  259. path = self.directory
  260. mods = []
  261. for subdir in fullpath:
  262. path = os.path.join(path, subdir)
  263. os.mkdir(path)
  264. script_helper.make_script(path, "__init__", "")
  265. mods.append(script_helper.make_script(path, "mod",
  266. "def fn(): 1/0\nfn()\n"))
  267. compileall.compile_dir(
  268. self.directory, quiet=True, ddir=ddir,
  269. workers=2 if parallel else 1)
  270. self.assertTrue(mods)
  271. for mod in mods:
  272. self.assertTrue(mod.startswith(self.directory), mod)
  273. modcode = importlib.util.cache_from_source(mod)
  274. modpath = mod[len(self.directory+os.sep):]
  275. _, _, err = script_helper.assert_python_failure(modcode)
  276. expected_in = os.path.join(ddir, modpath)
  277. mod_code_obj = test.test_importlib.util.get_code_from_pyc(modcode)
  278. self.assertEqual(mod_code_obj.co_filename, expected_in)
  279. self.assertIn(f'"{expected_in}"', os.fsdecode(err))
  280. def test_ddir_only_one_worker(self):
  281. """Recursive compile_dir ddir= contains package paths; bpo39769."""
  282. return self._test_ddir_only(ddir="<a prefix>", parallel=False)
  283. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  284. def test_ddir_multiple_workers(self):
  285. """Recursive compile_dir ddir= contains package paths; bpo39769."""
  286. return self._test_ddir_only(ddir="<a prefix>", parallel=True)
  287. def test_ddir_empty_only_one_worker(self):
  288. """Recursive compile_dir ddir='' contains package paths; bpo39769."""
  289. return self._test_ddir_only(ddir="", parallel=False)
  290. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  291. def test_ddir_empty_multiple_workers(self):
  292. """Recursive compile_dir ddir='' contains package paths; bpo39769."""
  293. return self._test_ddir_only(ddir="", parallel=True)
  294. def test_strip_only(self):
  295. fullpath = ["test", "build", "real", "path"]
  296. path = os.path.join(self.directory, *fullpath)
  297. os.makedirs(path)
  298. script = script_helper.make_script(path, "test", "1 / 0")
  299. bc = importlib.util.cache_from_source(script)
  300. stripdir = os.path.join(self.directory, *fullpath[:2])
  301. compileall.compile_dir(path, quiet=True, stripdir=stripdir)
  302. rc, out, err = script_helper.assert_python_failure(bc)
  303. expected_in = os.path.join(*fullpath[2:])
  304. self.assertIn(
  305. expected_in,
  306. str(err, encoding=sys.getdefaultencoding())
  307. )
  308. self.assertNotIn(
  309. stripdir,
  310. str(err, encoding=sys.getdefaultencoding())
  311. )
  312. def test_prepend_only(self):
  313. fullpath = ["test", "build", "real", "path"]
  314. path = os.path.join(self.directory, *fullpath)
  315. os.makedirs(path)
  316. script = script_helper.make_script(path, "test", "1 / 0")
  317. bc = importlib.util.cache_from_source(script)
  318. prependdir = "/foo"
  319. compileall.compile_dir(path, quiet=True, prependdir=prependdir)
  320. rc, out, err = script_helper.assert_python_failure(bc)
  321. expected_in = os.path.join(prependdir, self.directory, *fullpath)
  322. self.assertIn(
  323. expected_in,
  324. str(err, encoding=sys.getdefaultencoding())
  325. )
  326. def test_strip_and_prepend(self):
  327. fullpath = ["test", "build", "real", "path"]
  328. path = os.path.join(self.directory, *fullpath)
  329. os.makedirs(path)
  330. script = script_helper.make_script(path, "test", "1 / 0")
  331. bc = importlib.util.cache_from_source(script)
  332. stripdir = os.path.join(self.directory, *fullpath[:2])
  333. prependdir = "/foo"
  334. compileall.compile_dir(path, quiet=True,
  335. stripdir=stripdir, prependdir=prependdir)
  336. rc, out, err = script_helper.assert_python_failure(bc)
  337. expected_in = os.path.join(prependdir, *fullpath[2:])
  338. self.assertIn(
  339. expected_in,
  340. str(err, encoding=sys.getdefaultencoding())
  341. )
  342. self.assertNotIn(
  343. stripdir,
  344. str(err, encoding=sys.getdefaultencoding())
  345. )
  346. def test_strip_prepend_and_ddir(self):
  347. fullpath = ["test", "build", "real", "path", "ddir"]
  348. path = os.path.join(self.directory, *fullpath)
  349. os.makedirs(path)
  350. script_helper.make_script(path, "test", "1 / 0")
  351. with self.assertRaises(ValueError):
  352. compileall.compile_dir(path, quiet=True, ddir="/bar",
  353. stripdir="/foo", prependdir="/bar")
  354. def test_multiple_optimization_levels(self):
  355. script = script_helper.make_script(self.directory,
  356. "test_optimization",
  357. "a = 0")
  358. bc = []
  359. for opt_level in "", 1, 2, 3:
  360. bc.append(importlib.util.cache_from_source(script,
  361. optimization=opt_level))
  362. test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]]
  363. for opt_combination in test_combinations:
  364. compileall.compile_file(script, quiet=True,
  365. optimize=opt_combination)
  366. for opt_level in opt_combination:
  367. self.assertTrue(os.path.isfile(bc[opt_level]))
  368. try:
  369. os.unlink(bc[opt_level])
  370. except Exception:
  371. pass
  372. @os_helper.skip_unless_symlink
  373. def test_ignore_symlink_destination(self):
  374. # Create folders for allowed files, symlinks and prohibited area
  375. allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
  376. symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
  377. prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
  378. os.makedirs(allowed_path)
  379. os.makedirs(symlinks_path)
  380. os.makedirs(prohibited_path)
  381. # Create scripts and symlinks and remember their byte-compiled versions
  382. allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
  383. prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
  384. allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
  385. prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
  386. os.symlink(allowed_script, allowed_symlink)
  387. os.symlink(prohibited_script, prohibited_symlink)
  388. allowed_bc = importlib.util.cache_from_source(allowed_symlink)
  389. prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)
  390. compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path)
  391. self.assertTrue(os.path.isfile(allowed_bc))
  392. self.assertFalse(os.path.isfile(prohibited_bc))
class CompileallTestsWithSourceEpoch(CompileallTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=True):
    # Runs the shared suite with SOURCE_DATE_EPOCH set by the metaclass.
    pass
class CompileallTestsWithoutSourceEpoch(CompileallTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=False):
    # Runs the shared suite with SOURCE_DATE_EPOCH unset by the metaclass.
    pass
  403. # WASI does not have a temp directory and uses cwd instead. The cwd contains
  404. # non-ASCII chars, so _walk_dir() fails to encode self.directory.
  405. @unittest.skipIf(support.is_wasi, "tempdir is not encodable on WASI")
  406. class EncodingTest(unittest.TestCase):
  407. """Issue 6716: compileall should escape source code when printing errors
  408. to stdout."""
  409. def setUp(self):
  410. self.directory = tempfile.mkdtemp()
  411. self.source_path = os.path.join(self.directory, '_test.py')
  412. with open(self.source_path, 'w', encoding='utf-8') as file:
  413. file.write('# -*- coding: utf-8 -*-\n')
  414. file.write('print u"\u20ac"\n')
  415. def tearDown(self):
  416. shutil.rmtree(self.directory)
  417. def test_error(self):
  418. try:
  419. orig_stdout = sys.stdout
  420. sys.stdout = io.TextIOWrapper(io.BytesIO(),encoding='ascii')
  421. compileall.compile_dir(self.directory)
  422. finally:
  423. sys.stdout = orig_stdout
class CommandLineTestsBase:
    """Test compileall's CLI."""

    def setUp(self):
        # Fresh tree containing one package ("foo") with two modules.
        self.directory = tempfile.mkdtemp()
        self.addCleanup(os_helper.rmtree, self.directory)
        self.pkgdir = os.path.join(self.directory, 'foo')
        os.mkdir(self.pkgdir)
        self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__')
        # Create the __init__.py and a package module.
        self.initfn = script_helper.make_script(self.pkgdir, '__init__', '')
        self.barfn = script_helper.make_script(self.pkgdir, 'bar', '')
    @contextlib.contextmanager
    def temporary_pycache_prefix(self):
        """Adjust and restore sys.pycache_prefix."""
        old_prefix = sys.pycache_prefix
        new_prefix = os.path.join(self.directory, '__testcache__')
        try:
            sys.pycache_prefix = new_prefix
            # Yield the env vars a child interpreter needs to see the same
            # prefix and import path.
            yield {
                'PYTHONPATH': self.directory,
                'PYTHONPYCACHEPREFIX': new_prefix,
            }
        finally:
            sys.pycache_prefix = old_prefix
  448. def _get_run_args(self, args):
  449. return [*support.optim_args_from_interpreter_flags(),
  450. '-S', '-m', 'compileall',
  451. *args]
  452. def assertRunOK(self, *args, **env_vars):
  453. rc, out, err = script_helper.assert_python_ok(
  454. *self._get_run_args(args), **env_vars,
  455. PYTHONIOENCODING='utf-8')
  456. self.assertEqual(b'', err)
  457. return out
  458. def assertRunNotOK(self, *args, **env_vars):
  459. rc, out, err = script_helper.assert_python_failure(
  460. *self._get_run_args(args), **env_vars,
  461. PYTHONIOENCODING='utf-8')
  462. return rc, out, err
  463. def assertCompiled(self, fn):
  464. path = importlib.util.cache_from_source(fn)
  465. self.assertTrue(os.path.exists(path))
  466. def assertNotCompiled(self, fn):
  467. path = importlib.util.cache_from_source(fn)
  468. self.assertFalse(os.path.exists(path))
    def test_no_args_compiles_path(self):
        # Note that -l is implied for the no args case.
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            self.assertCompiled(bazfn)
            # Only the sys.path entry itself is compiled, not its packages.
            self.assertNotCompiled(self.initfn)
            self.assertNotCompiled(self.barfn)
    @without_source_date_epoch  # timestamp invalidation test
    def test_no_args_respects_force_flag(self):
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            pycpath = importlib.util.cache_from_source(bazfn)
            # Set atime/mtime backward to avoid file timestamp resolution issues
            os.utime(pycpath, (time.time()-60,)*2)
            mtime = os.stat(pycpath).st_mtime
            # Without force, no recompilation
            self.assertRunOK(**env)
            mtime2 = os.stat(pycpath).st_mtime
            self.assertEqual(mtime, mtime2)
            # Now force it.
            self.assertRunOK('-f', **env)
            mtime2 = os.stat(pycpath).st_mtime
            self.assertNotEqual(mtime, mtime2)
    def test_no_args_respects_quiet_flag(self):
        script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            # Default output lists every directory visited; -q suppresses it.
            noisy = self.assertRunOK(**env)
            self.assertIn(b'Listing ', noisy)
            quiet = self.assertRunOK('-q', **env)
            self.assertNotIn(b'Listing ', quiet)
    # Ensure that the default behavior of compileall's CLI is to create
    # PEP 3147/PEP 488 pyc files.
    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'opt-1.pyc', ['-O']),
        ('doubleoptimize', 'opt-2.pyc', ['-OO']),
    ]:
        def f(self, ext=ext, switch=switch):
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall', '-q', self.pkgdir]))
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        # Bind each generated test method under a descriptive name; this runs
        # in the class body, so locals() is the class namespace.
        locals()['test_pep3147_paths_' + name] = f
  520. def test_legacy_paths(self):
  521. # Ensure that with the proper switch, compileall leaves legacy
  522. # pyc files, and no __pycache__ directory.
  523. self.assertRunOK('-b', '-q', self.pkgdir)
  524. # Verify the __pycache__ directory contents.
  525. self.assertFalse(os.path.exists(self.pkgdir_cachedir))
  526. expected = sorted(['__init__.py', '__init__.pyc', 'bar.py',
  527. 'bar.pyc'])
  528. self.assertEqual(sorted(os.listdir(self.pkgdir)), expected)
    def test_multiple_runs(self):
        # Bug 8527 reported that multiple calls produced empty
        # __pycache__/__pycache__ directories.
        self.assertRunOK('-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__')
        self.assertFalse(os.path.exists(cachecachedir))
        # Call compileall again.
        self.assertRunOK('-q', self.pkgdir)
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        self.assertFalse(os.path.exists(cachecachedir))
    @without_source_date_epoch  # timestamp invalidation test
    def test_force(self):
        self.assertRunOK('-q', self.pkgdir)
        pycpath = importlib.util.cache_from_source(self.barfn)
        # set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # without force, no recompilation
        self.assertRunOK('-q', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # now force it.
        self.assertRunOK('-q', '-f', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)
    def test_recursion_control(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        os.mkdir(subpackage)
        subinitfn = script_helper.make_script(subpackage, '__init__', '')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        # -l restricts compilation to the listed directory itself.
        self.assertRunOK('-q', '-l', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__')))
        # Without -l, the subpackage is compiled too.
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
    def test_recursion_limit(self):
        # Three nested packages; each -r value should descend one level more.
        subpackage = os.path.join(self.pkgdir, 'spam')
        subpackage2 = os.path.join(subpackage, 'ham')
        subpackage3 = os.path.join(subpackage2, 'eggs')
        for pkg in (subpackage, subpackage2, subpackage3):
            script_helper.make_pkg(pkg)
        subinitfn = os.path.join(subpackage, '__init__.py')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        spamfn = script_helper.make_script(subpackage2, 'spam', '')
        eggfn = script_helper.make_script(subpackage3, 'egg', '')
        # -r 0 compiles only the listed directory.
        self.assertRunOK('-q', '-r 0', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(
            os.path.exists(os.path.join(subpackage, '__pycache__')))
        self.assertRunOK('-q', '-r 1', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertNotCompiled(spamfn)
        self.assertRunOK('-q', '-r 2', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertNotCompiled(eggfn)
        # A limit deeper than the tree compiles everything.
        self.assertRunOK('-q', '-r 5', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertCompiled(eggfn)
    @os_helper.skip_unless_symlink
    def test_symlink_loop(self):
        # Currently, compileall ignores symlinks to directories.
        # If that limitation is ever lifted, it should protect against
        # recursion in symlink loops.
        pkg = os.path.join(self.pkgdir, 'spam')
        script_helper.make_pkg(pkg)
        os.symlink('.', os.path.join(pkg, 'evil'))
        os.symlink('.', os.path.join(pkg, 'evil2'))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(os.path.join(
            self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py'
        ))
  608. def test_quiet(self):
  609. noisy = self.assertRunOK(self.pkgdir)
  610. quiet = self.assertRunOK('-q', self.pkgdir)
  611. self.assertNotEqual(b'', noisy)
  612. self.assertEqual(b'', quiet)
  613. def test_silent(self):
  614. script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
  615. _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir)
  616. _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir)
  617. self.assertNotEqual(b'', quiet)
  618. self.assertEqual(b'', silent)
  619. def test_regexp(self):
  620. self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir)
  621. self.assertNotCompiled(self.barfn)
  622. self.assertCompiled(self.initfn)
  623. def test_multiple_dirs(self):
  624. pkgdir2 = os.path.join(self.directory, 'foo2')
  625. os.mkdir(pkgdir2)
  626. init2fn = script_helper.make_script(pkgdir2, '__init__', '')
  627. bar2fn = script_helper.make_script(pkgdir2, 'bar2', '')
  628. self.assertRunOK('-q', self.pkgdir, pkgdir2)
  629. self.assertCompiled(self.initfn)
  630. self.assertCompiled(self.barfn)
  631. self.assertCompiled(init2fn)
  632. self.assertCompiled(bar2fn)
  633. def test_d_compile_error(self):
  634. script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
  635. rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
  636. self.assertRegex(out, b'File "dinsdale')
  637. def test_d_runtime_error(self):
  638. bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
  639. self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
  640. fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz')
  641. pyc = importlib.util.cache_from_source(bazfn)
  642. os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc'))
  643. os.remove(bazfn)
  644. rc, out, err = script_helper.assert_python_failure(fn, __isolated=False)
  645. self.assertRegex(err, b'File "dinsdale')
  646. def test_include_bad_file(self):
  647. rc, out, err = self.assertRunNotOK(
  648. '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir)
  649. self.assertRegex(out, b'rror.*nosuchfile')
  650. self.assertNotRegex(err, b'Traceback')
  651. self.assertFalse(os.path.exists(importlib.util.cache_from_source(
  652. self.pkgdir_cachedir)))
  653. def test_include_file_with_arg(self):
  654. f1 = script_helper.make_script(self.pkgdir, 'f1', '')
  655. f2 = script_helper.make_script(self.pkgdir, 'f2', '')
  656. f3 = script_helper.make_script(self.pkgdir, 'f3', '')
  657. f4 = script_helper.make_script(self.pkgdir, 'f4', '')
  658. with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
  659. l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep)
  660. l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
  661. self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4)
  662. self.assertCompiled(f1)
  663. self.assertCompiled(f2)
  664. self.assertNotCompiled(f3)
  665. self.assertCompiled(f4)
  666. def test_include_file_no_arg(self):
  667. f1 = script_helper.make_script(self.pkgdir, 'f1', '')
  668. f2 = script_helper.make_script(self.pkgdir, 'f2', '')
  669. f3 = script_helper.make_script(self.pkgdir, 'f3', '')
  670. f4 = script_helper.make_script(self.pkgdir, 'f4', '')
  671. with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
  672. l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
  673. self.assertRunOK('-i', os.path.join(self.directory, 'l1'))
  674. self.assertNotCompiled(f1)
  675. self.assertCompiled(f2)
  676. self.assertNotCompiled(f3)
  677. self.assertNotCompiled(f4)
  678. def test_include_on_stdin(self):
  679. f1 = script_helper.make_script(self.pkgdir, 'f1', '')
  680. f2 = script_helper.make_script(self.pkgdir, 'f2', '')
  681. f3 = script_helper.make_script(self.pkgdir, 'f3', '')
  682. f4 = script_helper.make_script(self.pkgdir, 'f4', '')
  683. p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-']))
  684. p.stdin.write((f3+os.linesep).encode('ascii'))
  685. script_helper.kill_python(p)
  686. self.assertNotCompiled(f1)
  687. self.assertNotCompiled(f2)
  688. self.assertCompiled(f3)
  689. self.assertNotCompiled(f4)
  690. def test_compiles_as_much_as_possible(self):
  691. bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error')
  692. rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn,
  693. bingfn, self.barfn)
  694. self.assertRegex(out, b'rror')
  695. self.assertNotCompiled(bingfn)
  696. self.assertCompiled(self.initfn)
  697. self.assertCompiled(self.barfn)
  698. def test_invalid_arg_produces_message(self):
  699. out = self.assertRunOK('badfilename')
  700. self.assertRegex(out, b"Can't list 'badfilename'")
  701. def test_pyc_invalidation_mode(self):
  702. script_helper.make_script(self.pkgdir, 'f1', '')
  703. pyc = importlib.util.cache_from_source(
  704. os.path.join(self.pkgdir, 'f1.py'))
  705. self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
  706. with open(pyc, 'rb') as fp:
  707. data = fp.read()
  708. self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
  709. self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
  710. with open(pyc, 'rb') as fp:
  711. data = fp.read()
  712. self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01)
  713. @skipUnless(_have_multiprocessing, "requires multiprocessing")
  714. def test_workers(self):
  715. bar2fn = script_helper.make_script(self.directory, 'bar2', '')
  716. files = []
  717. for suffix in range(5):
  718. pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix))
  719. os.mkdir(pkgdir)
  720. fn = script_helper.make_script(pkgdir, '__init__', '')
  721. files.append(script_helper.make_script(pkgdir, 'bar2', ''))
  722. self.assertRunOK(self.directory, '-j', '0')
  723. self.assertCompiled(bar2fn)
  724. for file in files:
  725. self.assertCompiled(file)
  726. @mock.patch('compileall.compile_dir')
  727. def test_workers_available_cores(self, compile_dir):
  728. with mock.patch("sys.argv",
  729. new=[sys.executable, self.directory, "-j0"]):
  730. compileall.main()
  731. self.assertTrue(compile_dir.called)
  732. self.assertEqual(compile_dir.call_args[-1]['workers'], 0)
  733. def test_strip_and_prepend(self):
  734. fullpath = ["test", "build", "real", "path"]
  735. path = os.path.join(self.directory, *fullpath)
  736. os.makedirs(path)
  737. script = script_helper.make_script(path, "test", "1 / 0")
  738. bc = importlib.util.cache_from_source(script)
  739. stripdir = os.path.join(self.directory, *fullpath[:2])
  740. prependdir = "/foo"
  741. self.assertRunOK("-s", stripdir, "-p", prependdir, path)
  742. rc, out, err = script_helper.assert_python_failure(bc)
  743. expected_in = os.path.join(prependdir, *fullpath[2:])
  744. self.assertIn(
  745. expected_in,
  746. str(err, encoding=sys.getdefaultencoding())
  747. )
  748. self.assertNotIn(
  749. stripdir,
  750. str(err, encoding=sys.getdefaultencoding())
  751. )
  752. def test_multiple_optimization_levels(self):
  753. path = os.path.join(self.directory, "optimizations")
  754. os.makedirs(path)
  755. script = script_helper.make_script(path,
  756. "test_optimization",
  757. "a = 0")
  758. bc = []
  759. for opt_level in "", 1, 2, 3:
  760. bc.append(importlib.util.cache_from_source(script,
  761. optimization=opt_level))
  762. test_combinations = [["0", "1"],
  763. ["1", "2"],
  764. ["0", "2"],
  765. ["0", "1", "2"]]
  766. for opt_combination in test_combinations:
  767. self.assertRunOK(path, *("-o" + str(n) for n in opt_combination))
  768. for opt_level in opt_combination:
  769. self.assertTrue(os.path.isfile(bc[int(opt_level)]))
  770. try:
  771. os.unlink(bc[opt_level])
  772. except Exception:
  773. pass
  774. @os_helper.skip_unless_symlink
  775. def test_ignore_symlink_destination(self):
  776. # Create folders for allowed files, symlinks and prohibited area
  777. allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
  778. symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
  779. prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
  780. os.makedirs(allowed_path)
  781. os.makedirs(symlinks_path)
  782. os.makedirs(prohibited_path)
  783. # Create scripts and symlinks and remember their byte-compiled versions
  784. allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
  785. prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
  786. allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
  787. prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
  788. os.symlink(allowed_script, allowed_symlink)
  789. os.symlink(prohibited_script, prohibited_symlink)
  790. allowed_bc = importlib.util.cache_from_source(allowed_symlink)
  791. prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)
  792. self.assertRunOK(symlinks_path, "-e", allowed_path)
  793. self.assertTrue(os.path.isfile(allowed_bc))
  794. self.assertFalse(os.path.isfile(prohibited_bc))
  795. def test_hardlink_bad_args(self):
  796. # Bad arguments combination, hardlink deduplication make sense
  797. # only for more than one optimization level
  798. self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes")
  799. def test_hardlink(self):
  800. # 'a = 0' code produces the same bytecode for the 3 optimization
  801. # levels. All three .pyc files must have the same inode (hardlinks).
  802. #
  803. # If deduplication is disabled, all pyc files must have different
  804. # inodes.
  805. for dedup in (True, False):
  806. with tempfile.TemporaryDirectory() as path:
  807. with self.subTest(dedup=dedup):
  808. script = script_helper.make_script(path, "script", "a = 0")
  809. pycs = get_pycs(script)
  810. args = ["-q", "-o 0", "-o 1", "-o 2"]
  811. if dedup:
  812. args.append("--hardlink-dupes")
  813. self.assertRunOK(path, *args)
  814. self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup)
  815. self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup)
  816. self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup)
  817. class CommandLineTestsWithSourceEpoch(CommandLineTestsBase,
  818. unittest.TestCase,
  819. metaclass=SourceDateEpochTestMeta,
  820. source_date_epoch=True):
  821. pass
  822. class CommandLineTestsNoSourceEpoch(CommandLineTestsBase,
  823. unittest.TestCase,
  824. metaclass=SourceDateEpochTestMeta,
  825. source_date_epoch=False):
  826. pass
@unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
class HardlinkDedupTestsBase:
    # Test the hardlink_dupes parameter of compileall.compile_dir():
    # pycs with identical content at different optimization levels
    # should be deduplicated into hardlinks of one another.

    def setUp(self):
        # Path of the active temporary directory; None outside
        # temporary_directory().
        self.path = None

    @contextlib.contextmanager
    def temporary_directory(self):
        """Create a temp dir and expose it as self.path for the duration."""
        with tempfile.TemporaryDirectory() as path:
            self.path = path
            yield path
            self.path = None

    def make_script(self, code, name="script"):
        """Write *code* to <self.path>/<name>.py and return the file path."""
        return script_helper.make_script(self.path, name, code)

    def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
        """Byte-compile self.path at the given optimization levels."""
        compileall.compile_dir(self.path, quiet=True, optimize=optimize,
                               hardlink_dupes=dedup, force=force)

    def test_bad_args(self):
        # Bad argument combination: hardlink deduplication makes sense
        # only for more than one optimization level.
        with self.temporary_directory():
            self.make_script("pass")
            with self.assertRaises(ValueError):
                compileall.compile_dir(self.path, quiet=True, optimize=0,
                                       hardlink_dupes=True)
            with self.assertRaises(ValueError):
                # same optimization level specified twice:
                # compile_dir() removes duplicates
                compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
                                       hardlink_dupes=True)

    def create_code(self, docstring=False, assertion=False):
        """Return module source whose bytecode can differ per -O level.

        An assertion is stripped at optimization level 1+, a docstring
        at level 2, so the flags control which pycs come out identical.
        """
        lines = []
        if docstring:
            lines.append("'module docstring'")
        lines.append('x = 1')
        if assertion:
            lines.append("assert x == 1")
        return '\n'.join(lines)

    def iter_codes(self):
        """Yield (code, docstring, assertion) for all four combinations."""
        for docstring in (False, True):
            for assertion in (False, True):
                code = self.create_code(docstring=docstring, assertion=assertion)
                yield (code, docstring, assertion)

    def test_disabled(self):
        # Deduplication disabled: no hardlinks, whatever the content.
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    pycs = get_pycs(script)
                    self.compile_dir(dedup=False)
                    self.assertFalse(is_hardlink(pycs[0], pycs[1]))
                    self.assertFalse(is_hardlink(pycs[0], pycs[2]))
                    self.assertFalse(is_hardlink(pycs[1], pycs[2]))

    def check_hardlinks(self, script, docstring=False, assertion=False):
        """Assert exactly which pyc pairs must be hardlinked.

        Levels 0 and 1 differ only when an assertion is present;
        levels 1 and 2 differ only when a docstring is present.
        """
        pycs = get_pycs(script)
        self.assertEqual(is_hardlink(pycs[0], pycs[1]),
                         not assertion)
        self.assertEqual(is_hardlink(pycs[0], pycs[2]),
                         not assertion and not docstring)
        self.assertEqual(is_hardlink(pycs[1], pycs[2]),
                         not docstring)

    def test_hardlink(self):
        # Test deduplication on all code combinations.
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    self.compile_dir()
                    self.check_hardlinks(script, docstring, assertion)

    def test_only_two_levels(self):
        # Don't build the 3 optimization levels, but only 2.
        for opts in ((0, 1), (1, 2), (0, 2)):
            with self.subTest(opts=opts):
                with self.temporary_directory():
                    # code with no docstring and no assertion:
                    # same bytecode for all optimization levels
                    script = self.make_script(self.create_code())
                    self.compile_dir(optimize=opts)
                    pyc1 = get_pyc(script, opts[0])
                    pyc2 = get_pyc(script, opts[1])
                    self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_duplicated_levels(self):
        # compile_dir() must not fail if optimize contains duplicated
        # optimization levels and/or if optimization levels are not sorted.
        with self.temporary_directory():
            # code with no docstring and no assertion:
            # same bytecode for all optimization levels
            script = self.make_script(self.create_code())
            self.compile_dir(optimize=[1, 0, 1, 0])
            pyc1 = get_pyc(script, 0)
            pyc2 = get_pyc(script, 1)
            self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_recompilation(self):
        # Test compile_dir() when pyc files already exist and the script
        # content changed.
        with self.temporary_directory():
            script = self.make_script("a = 0")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)
            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino
            # Change of the module content
            script = self.make_script("print(0)")
            # Recompilation without -o 1
            self.compile_dir(optimize=[0, 2], force=True)
            # opt-1.pyc should have the same inode as before and others should not
            self.assertEqual(inode, os.stat(pycs[1]).st_ino)
            self.assertTrue(is_hardlink(pycs[0], pycs[2]))
            self.assertNotEqual(inode, os.stat(pycs[2]).st_ino)
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))

    def test_import(self):
        # Test that import updates a single pyc file when pyc files already
        # exist and the script content changed.
        with self.temporary_directory():
            script = self.make_script(self.create_code(), name="module")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)
            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino
            # Change of the module content
            script = self.make_script("print(0)", name="module")
            # Import the module in Python with -O (optimization level 1)
            script_helper.assert_python_ok(
                "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path
            )
            # Only opt-1.pyc is changed
            self.assertEqual(inode, os.stat(pycs[0]).st_ino)
            self.assertEqual(inode, os.stat(pycs[2]).st_ino)
            self.assertFalse(is_hardlink(pycs[1], pycs[2]))
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))
  961. class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase,
  962. unittest.TestCase,
  963. metaclass=SourceDateEpochTestMeta,
  964. source_date_epoch=True):
  965. pass
  966. class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase,
  967. unittest.TestCase,
  968. metaclass=SourceDateEpochTestMeta,
  969. source_date_epoch=False):
  970. pass
# Run the whole suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()