import compileall
import contextlib
import filecmp
import importlib.util
import io
import os
import pathlib
import py_compile
import shutil
import struct
import sys
import tempfile
import test.test_importlib.util
import time
import unittest

from unittest import mock, skipUnless
from concurrent.futures import ProcessPoolExecutor
try:
    # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
    # and it can function.
    from concurrent.futures.process import _check_system_limits
    _check_system_limits()
    _have_multiprocessing = True
except NotImplementedError:
    _have_multiprocessing = False

from test import support
from test.support import os_helper
from test.support import script_helper
from test.test_py_compile import without_source_date_epoch
from test.test_py_compile import SourceDateEpochTestMeta


def get_pyc(script, opt):
    if not opt:
        # Replace None and 0 with ''
        opt = ''
    return importlib.util.cache_from_source(script, optimization=opt)


def get_pycs(script):
    return [get_pyc(script, opt) for opt in (0, 1, 2)]


def is_hardlink(filename1, filename2):
    """Returns True if two files have the same inode (hardlink)"""
    inode1 = os.stat(filename1).st_ino
    inode2 = os.stat(filename2).st_ino
    return inode1 == inode2


class CompileallTestsBase:

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        self.bc_path = importlib.util.cache_from_source(self.source_path)
        with open(self.source_path, 'w', encoding="utf-8") as file:
            file.write('x = 123\n')
        self.source_path2 = os.path.join(self.directory, '_test2.py')
        self.bc_path2 = importlib.util.cache_from_source(self.source_path2)
        shutil.copyfile(self.source_path, self.source_path2)
        self.subdirectory = os.path.join(self.directory, '_subdir')
        os.mkdir(self.subdirectory)
        self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
        shutil.copyfile(self.source_path, self.source_path3)

    def tearDown(self):
        shutil.rmtree(self.directory)

    def add_bad_source_file(self):
        self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
        with open(self.bad_source_path, 'w', encoding="utf-8") as file:
            file.write('x (\n')

    def timestamp_metadata(self):
        with open(self.bc_path, 'rb') as file:
            data = file.read(12)
        mtime = int(os.stat(self.source_path).st_mtime)
        compare = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 0,
                              mtime & 0xFFFF_FFFF)
        return data, compare

    def test_year_2038_mtime_compilation(self):
        # Test to make sure we can handle mtimes larger than what a 32-bit
        # signed number can hold as part of bpo-34990
        try:
            os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support timestamps near 2**32")
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertTrue(compileall.compile_file(self.source_path))

    def test_larger_than_32_bit_times(self):
        # This is similar to the test above but we skip it if the OS doesn't
        # support modification times larger than 32-bits.
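        # 2**35 seconds is well past the signed 32-bit limit of 2**31 - 1.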
        try:
            os.utime(self.source_path, (2**35, 2**35))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support large timestamps")
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertTrue(compileall.compile_file(self.source_path))

    def recreation_check(self, metadata):
        """Check that compileall recreates bytecode when the new metadata is
        used."""
        if os.environ.get('SOURCE_DATE_EPOCH'):
            raise unittest.SkipTest('SOURCE_DATE_EPOCH is set')
        py_compile.compile(self.source_path)
        self.assertEqual(*self.timestamp_metadata())
        with open(self.bc_path, 'rb') as file:
            bc = file.read()[len(metadata):]
        with open(self.bc_path, 'wb') as file:
            file.write(metadata)
            file.write(bc)
        self.assertNotEqual(*self.timestamp_metadata())
        compileall.compile_dir(self.directory, force=False, quiet=True)
        self.assertEqual(*self.timestamp_metadata())

    def test_mtime(self):
        # Test a change in mtime leads to a new .pyc.
        self.recreation_check(struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
                                          0, 1))

    def test_magic_number(self):
        # Test a change in magic number leads to a new .pyc.
        self.recreation_check(b'\0\0\0\0')

    def test_compile_files(self):
        # Test compiling a single file, and complete directory
        for fn in (self.bc_path, self.bc_path2):
            try:
                os.unlink(fn)
            except:
                pass
        self.assertTrue(compileall.compile_file(self.source_path,
                                                force=False, quiet=True))
        self.assertTrue(os.path.isfile(self.bc_path) and
                        not os.path.isfile(self.bc_path2))
        os.unlink(self.bc_path)
        self.assertTrue(compileall.compile_dir(self.directory, force=False,
                                               quiet=True))
        self.assertTrue(os.path.isfile(self.bc_path) and
                        os.path.isfile(self.bc_path2))
        os.unlink(self.bc_path)
        os.unlink(self.bc_path2)
        # Test against bad files
        self.add_bad_source_file()
        self.assertFalse(compileall.compile_file(self.bad_source_path,
                                                 force=False, quiet=2))
        self.assertFalse(compileall.compile_dir(self.directory,
                                                force=False, quiet=2))

    def test_compile_file_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        # we should also test the output
        with support.captured_stdout() as stdout:
            self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path)))
        self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_ddir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
                                                ddir=pathlib.Path('ddir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_path(self):
        with test.test_importlib.util.import_state(path=[self.directory]):
            self.assertTrue(compileall.compile_path(quiet=2))

        with test.test_importlib.util.import_state(path=[self.directory]):
            self.add_bad_source_file()
            self.assertFalse(compileall.compile_path(skip_curdir=False,
                                                     force=True, quiet=2))

    def test_no_pycache_in_non_package(self):
        # Bug 8563 reported that __pycache__ directories got created by
        # compile_file() for non-.py files.
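        # compile_file() skips files that do not end in .py, so no
        # __pycache__ directory should appear next to the data file.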
        data_dir = os.path.join(self.directory, 'data')
        data_file = os.path.join(data_dir, 'file')
        os.mkdir(data_dir)
        # touch data/file
        with open(data_file, 'wb'):
            pass
        compileall.compile_file(data_file)
        self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))

    def test_compile_file_encoding_fallback(self):
        # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
        self.add_bad_source_file()
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertFalse(compileall.compile_file(self.bad_source_path))

    def test_optimize(self):
        # make sure compiling with different optimization settings than the
        # interpreter's creates the correct file names
        optimize, opt = (1, 1) if __debug__ else (0, '')
        compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
        cached = importlib.util.cache_from_source(self.source_path,
                                                  optimization=opt)
        self.assertTrue(os.path.isfile(cached))
        cached2 = importlib.util.cache_from_source(self.source_path2,
                                                   optimization=opt)
        self.assertTrue(os.path.isfile(cached2))
        cached3 = importlib.util.cache_from_source(self.source_path3,
                                                   optimization=opt)
        self.assertTrue(os.path.isfile(cached3))

    def test_compile_dir_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        with support.captured_stdout() as stdout:
            compileall.compile_dir(pathlib.Path(self.directory))
        line = stdout.getvalue().splitlines()[0]
        self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_pool_called(self, pool_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(pool_mock.called)

    def test_compile_workers_non_positive(self):
        with self.assertRaisesRegex(ValueError,
                                    "workers must be greater or equal to 0"):
            compileall.compile_dir(self.directory, workers=-1)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_workers_cpu_count(self, pool_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=0)
        self.assertEqual(pool_mock.call_args[1]['max_workers'], None)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    @mock.patch('compileall.compile_file')
    def test_compile_one_worker(self, compile_file_mock, pool_mock):
        compileall.compile_dir(self.directory, quiet=True)
        self.assertFalse(pool_mock.called)
        self.assertTrue(compile_file_mock.called)

    @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None)
    @mock.patch('compileall.compile_file')
    def test_compile_missing_multiprocessing(self, compile_file_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(compile_file_mock.called)

    def test_compile_dir_maxlevels(self):
        # Test the actual impact of maxlevels parameter
        depth = 3
        path = self.directory
        for i in range(1, depth + 1):
            path = os.path.join(path, f"dir_{i}")
            source = os.path.join(path, 'script.py')
            os.mkdir(path)
            shutil.copyfile(self.source_path, source)
        pyc_filename = importlib.util.cache_from_source(source)

        compileall.compile_dir(self.directory,
                               quiet=True, maxlevels=depth - 1)
        self.assertFalse(os.path.isfile(pyc_filename))

        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth)
        self.assertTrue(os.path.isfile(pyc_filename))

    def _test_ddir_only(self, *, ddir, parallel=True):
        """Recursive compile_dir ddir must contain package paths; bpo39769."""
        fullpath = ["test", "foo"]
        path = self.directory
        mods = []
        for subdir in fullpath:
            path = os.path.join(path, subdir)
            os.mkdir(path)
            script_helper.make_script(path, "__init__", "")
            mods.append(script_helper.make_script(path, "mod",
                                                  "def fn(): 1/0\nfn()\n"))
        compileall.compile_dir(
            self.directory, quiet=True, ddir=ddir,
            workers=2 if parallel else 1)
        self.assertTrue(mods)
        for mod in mods:
            self.assertTrue(mod.startswith(self.directory), mod)
            modcode = importlib.util.cache_from_source(mod)
            modpath = mod[len(self.directory+os.sep):]
            _, _, err = script_helper.assert_python_failure(modcode)
            expected_in = os.path.join(ddir, modpath)
            mod_code_obj = test.test_importlib.util.get_code_from_pyc(modcode)
            self.assertEqual(mod_code_obj.co_filename, expected_in)
            self.assertIn(f'"{expected_in}"', os.fsdecode(err))

    def test_ddir_only_one_worker(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=False)

    def test_ddir_multiple_workers(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=True)

    def test_ddir_empty_only_one_worker(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=False)

    def test_ddir_empty_multiple_workers(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=True)

    def test_strip_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        compileall.compile_dir(path, quiet=True, stripdir=stripdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(*fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_prepend_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, self.directory, *fullpath)
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory,
                                *fullpath[:2])
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True,
                               stripdir=stripdir, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_prepend_and_ddir(self):
        fullpath = ["test", "build", "real", "path", "ddir"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script_helper.make_script(path, "test", "1 / 0")
        with self.assertRaises(ValueError):
            compileall.compile_dir(path, quiet=True, ddir="/bar",
                                   stripdir="/foo", prependdir="/bar")

    def test_multiple_optimization_levels(self):
        script = script_helper.make_script(self.directory,
                                           "test_optimization",
                                           "a = 0")
        bc = []
        for opt_level in "", 1, 2, 3:
            bc.append(importlib.util.cache_from_source(script,
                                                       optimization=opt_level))
        test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]]
        for opt_combination in test_combinations:
            compileall.compile_file(script, quiet=True,
                                    optimize=opt_combination)
            for opt_level in opt_combination:
                self.assertTrue(os.path.isfile(bc[opt_level]))
                try:
                    os.unlink(bc[opt_level])
                except Exception:
                    pass

    @os_helper.skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))


class CompileallTestsWithSourceEpoch(CompileallTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=True):
    pass


class CompileallTestsWithoutSourceEpoch(CompileallTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=False):
    pass


class EncodingTest(unittest.TestCase):
    """Issue 6716: compileall should escape source code when printing errors
    to stdout."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        with open(self.source_path, 'w', encoding='utf-8') as file:
            file.write('# -*- coding: utf-8 -*-\n')
            file.write('print u"\u20ac"\n')

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_error(self):
        try:
            orig_stdout = sys.stdout
            sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
            compileall.compile_dir(self.directory)
        finally:
            sys.stdout = orig_stdout


class CommandLineTestsBase:
    """Test compileall's CLI."""

    @classmethod
    def setUpClass(cls):
        for path in filter(os.path.isdir, sys.path):
            directory_created = False
            directory = pathlib.Path(path) / '__pycache__'
            path = directory / 'test.try'
            try:
                if not directory.is_dir():
                    directory.mkdir()
                    directory_created = True
                path.write_text('# for test_compileall', encoding="utf-8")
            except OSError:
                sys_path_writable = False
                break
            finally:
                os_helper.unlink(str(path))
                if directory_created:
                    directory.rmdir()
        else:
            sys_path_writable = True
        cls._sys_path_writable = sys_path_writable

    def _skip_if_sys_path_not_writable(self):
        if not self._sys_path_writable:
            raise unittest.SkipTest('not all entries on sys.path are writable')

    def _get_run_args(self, args):
        return [*support.optim_args_from_interpreter_flags(),
                '-S', '-m', 'compileall',
                *args]

    def assertRunOK(self, *args, **env_vars):
        rc, out, err = script_helper.assert_python_ok(
            *self._get_run_args(args), **env_vars,
            PYTHONIOENCODING='utf-8')
        self.assertEqual(b'', err)
        return out

    def assertRunNotOK(self, *args, **env_vars):
        rc, out, err = script_helper.assert_python_failure(
            *self._get_run_args(args), **env_vars,
            PYTHONIOENCODING='utf-8')
        return rc, out, err

    def assertCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertTrue(os.path.exists(path))

    def assertNotCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertFalse(os.path.exists(path))

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.addCleanup(os_helper.rmtree, self.directory)
        self.pkgdir = os.path.join(self.directory, 'foo')
        os.mkdir(self.pkgdir)
        self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__')
        # Create the __init__.py and a package module.
        self.initfn = script_helper.make_script(self.pkgdir, '__init__', '')
        self.barfn = script_helper.make_script(self.pkgdir, 'bar', '')

    def test_no_args_compiles_path(self):
        # Note that -l is implied for the no args case.
        self._skip_if_sys_path_not_writable()
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        self.assertRunOK(PYTHONPATH=self.directory)
        self.assertCompiled(bazfn)
        self.assertNotCompiled(self.initfn)
        self.assertNotCompiled(self.barfn)

    @without_source_date_epoch  # timestamp invalidation test
    def test_no_args_respects_force_flag(self):
        self._skip_if_sys_path_not_writable()
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        self.assertRunOK(PYTHONPATH=self.directory)
        pycpath = importlib.util.cache_from_source(bazfn)
        # Set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # Without force, no recompilation
        self.assertRunOK(PYTHONPATH=self.directory)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # Now force it.
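        # -f forces recompilation even though the existing pyc is up to date.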
        self.assertRunOK('-f', PYTHONPATH=self.directory)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    def test_no_args_respects_quiet_flag(self):
        self._skip_if_sys_path_not_writable()
        script_helper.make_script(self.directory, 'baz', '')
        noisy = self.assertRunOK(PYTHONPATH=self.directory)
        self.assertIn(b'Listing ', noisy)
        quiet = self.assertRunOK('-q', PYTHONPATH=self.directory)
        self.assertNotIn(b'Listing ', quiet)

    # Ensure that the default behavior of compileall's CLI is to create
    # PEP 3147/PEP 488 pyc files.
    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'opt-1.pyc', ['-O']),
        ('doubleoptimize', 'opt-2.pyc', ['-OO']),
    ]:
        def f(self, ext=ext, switch=switch):
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall', '-q', self.pkgdir]))
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        locals()['test_pep3147_paths_' + name] = f

    def test_legacy_paths(self):
        # Ensure that with the proper switch, compileall leaves legacy
        # pyc files, and no __pycache__ directory.
        self.assertRunOK('-b', '-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertFalse(os.path.exists(self.pkgdir_cachedir))
        expected = sorted(['__init__.py', '__init__.pyc', 'bar.py',
                           'bar.pyc'])
        self.assertEqual(sorted(os.listdir(self.pkgdir)), expected)

    def test_multiple_runs(self):
        # Bug 8527 reported that multiple calls produced empty
        # __pycache__/__pycache__ directories.
        self.assertRunOK('-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__')
        self.assertFalse(os.path.exists(cachecachedir))
        # Call compileall again.
        self.assertRunOK('-q', self.pkgdir)
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        self.assertFalse(os.path.exists(cachecachedir))

    @without_source_date_epoch  # timestamp invalidation test
    def test_force(self):
        self.assertRunOK('-q', self.pkgdir)
        pycpath = importlib.util.cache_from_source(self.barfn)
        # set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # without force, no recompilation
        self.assertRunOK('-q', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # now force it.
        self.assertRunOK('-q', '-f', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    def test_recursion_control(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        os.mkdir(subpackage)
        subinitfn = script_helper.make_script(subpackage, '__init__', '')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        self.assertRunOK('-q', '-l', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__')))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)

    def test_recursion_limit(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        subpackage2 = os.path.join(subpackage, 'ham')
        subpackage3 = os.path.join(subpackage2, 'eggs')
        for pkg in (subpackage, subpackage2, subpackage3):
            script_helper.make_pkg(pkg)

        subinitfn = os.path.join(subpackage, '__init__.py')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        spamfn = script_helper.make_script(subpackage2, 'spam', '')
        eggfn = script_helper.make_script(subpackage3, 'egg', '')

        self.assertRunOK('-q', '-r 0', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(
            os.path.exists(os.path.join(subpackage, '__pycache__')))

        self.assertRunOK('-q', '-r 1', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertNotCompiled(spamfn)

        self.assertRunOK('-q', '-r 2', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertNotCompiled(eggfn)

        self.assertRunOK('-q', '-r 5', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertCompiled(eggfn)

    @os_helper.skip_unless_symlink
    def test_symlink_loop(self):
        # Currently, compileall ignores symlinks to directories.
        # If that limitation is ever lifted, it should protect against
        # recursion in symlink loops.
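        # Both symlinks point back at the package directory itself, forming a loop.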
        pkg = os.path.join(self.pkgdir, 'spam')
        script_helper.make_pkg(pkg)
        os.symlink('.', os.path.join(pkg, 'evil'))
        os.symlink('.', os.path.join(pkg, 'evil2'))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(os.path.join(
            self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py'
        ))

    def test_quiet(self):
        noisy = self.assertRunOK(self.pkgdir)
        quiet = self.assertRunOK('-q', self.pkgdir)
        self.assertNotEqual(b'', noisy)
        self.assertEqual(b'', quiet)

    def test_silent(self):
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir)
        _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir)
        self.assertNotEqual(b'', quiet)
        self.assertEqual(b'', silent)

    def test_regexp(self):
        self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir)
        self.assertNotCompiled(self.barfn)
        self.assertCompiled(self.initfn)

    def test_multiple_dirs(self):
        pkgdir2 = os.path.join(self.directory, 'foo2')
        os.mkdir(pkgdir2)
        init2fn = script_helper.make_script(pkgdir2, '__init__', '')
        bar2fn = script_helper.make_script(pkgdir2, 'bar2', '')
        self.assertRunOK('-q', self.pkgdir, pkgdir2)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)
        self.assertCompiled(init2fn)
        self.assertCompiled(bar2fn)

    def test_d_compile_error(self):
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
        self.assertRegex(out, b'File "dinsdale')

    def test_d_runtime_error(self):
        bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
        self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
        fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz')
        pyc = importlib.util.cache_from_source(bazfn)
        os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc'))
        os.remove(bazfn)
        rc, out, err = script_helper.assert_python_failure(fn, __isolated=False)
        self.assertRegex(err, b'File "dinsdale')

    def test_include_bad_file(self):
        rc, out, err = self.assertRunNotOK(
            '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir)
        self.assertRegex(out, b'rror.*nosuchfile')
        self.assertNotRegex(err, b'Traceback')
        self.assertFalse(os.path.exists(importlib.util.cache_from_source(
            self.pkgdir_cachedir)))

    def test_include_file_with_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep)
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4)
        self.assertCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertCompiled(f4)

    def test_include_file_no_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i',
                         os.path.join(self.directory, 'l1'))
        self.assertNotCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertNotCompiled(f4)

    def test_include_on_stdin(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-']))
        p.stdin.write((f3+os.linesep).encode('ascii'))
        script_helper.kill_python(p)
        self.assertNotCompiled(f1)
        self.assertNotCompiled(f2)
        self.assertCompiled(f3)
        self.assertNotCompiled(f4)

    def test_compiles_as_much_as_possible(self):
        bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error')
        rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn,
                                           bingfn, self.barfn)
        self.assertRegex(out, b'rror')
        self.assertNotCompiled(bingfn)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)

    def test_invalid_arg_produces_message(self):
        out = self.assertRunOK('badfilename')
        self.assertRegex(out, b"Can't list 'badfilename'")

    def test_pyc_invalidation_mode(self):
        script_helper.make_script(self.pkgdir, 'f1', '')
        pyc = importlib.util.cache_from_source(
            os.path.join(self.pkgdir, 'f1.py'))
        self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
        self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_workers(self):
        bar2fn = script_helper.make_script(self.directory, 'bar2', '')
        files = []
        for suffix in range(5):
            pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix))
            os.mkdir(pkgdir)
            fn = script_helper.make_script(pkgdir, '__init__', '')
            files.append(script_helper.make_script(pkgdir, 'bar2', ''))

        self.assertRunOK(self.directory, '-j', '0')
        self.assertCompiled(bar2fn)
        for file in files:
            self.assertCompiled(file)

    @mock.patch('compileall.compile_dir')
    def test_workers_available_cores(self, compile_dir):
        with mock.patch("sys.argv",
                        new=[sys.executable, self.directory, "-j0"]):
            compileall.main()
        self.assertTrue(compile_dir.called)
        self.assertEqual(compile_dir.call_args[-1]['workers'], 0)

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        prependdir = "/foo"
        self.assertRunOK("-s", stripdir, "-p", prependdir, path)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_multiple_optimization_levels(self):
        path = os.path.join(self.directory, "optimizations")
        os.makedirs(path)
        script = script_helper.make_script(path,
"test_optimization", 842 "a = 0") 843 bc = [] 844 for opt_level in "", 1, 2, 3: 845 bc.append(importlib.util.cache_from_source(script, 846 optimization=opt_level)) 847 test_combinations = [["0", "1"], 848 ["1", "2"], 849 ["0", "2"], 850 ["0", "1", "2"]] 851 for opt_combination in test_combinations: 852 self.assertRunOK(path, *("-o" + str(n) for n in opt_combination)) 853 for opt_level in opt_combination: 854 self.assertTrue(os.path.isfile(bc[int(opt_level)])) 855 try: 856 os.unlink(bc[opt_level]) 857 except Exception: 858 pass 859 860 @os_helper.skip_unless_symlink 861 def test_ignore_symlink_destination(self): 862 # Create folders for allowed files, symlinks and prohibited area 863 allowed_path = os.path.join(self.directory, "test", "dir", "allowed") 864 symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks") 865 prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited") 866 os.makedirs(allowed_path) 867 os.makedirs(symlinks_path) 868 os.makedirs(prohibited_path) 869 870 # Create scripts and symlinks and remember their byte-compiled versions 871 allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0") 872 prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0") 873 allowed_symlink = os.path.join(symlinks_path, "test_allowed.py") 874 prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py") 875 os.symlink(allowed_script, allowed_symlink) 876 os.symlink(prohibited_script, prohibited_symlink) 877 allowed_bc = importlib.util.cache_from_source(allowed_symlink) 878 prohibited_bc = importlib.util.cache_from_source(prohibited_symlink) 879 880 self.assertRunOK(symlinks_path, "-e", allowed_path) 881 882 self.assertTrue(os.path.isfile(allowed_bc)) 883 self.assertFalse(os.path.isfile(prohibited_bc)) 884 885 def test_hardlink_bad_args(self): 886 # Bad arguments combination, hardlink deduplication make sense 887 # only for more than one optimization level 888 self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes") 889 890 def test_hardlink(self): 891 # 'a = 0' code produces the same bytecode for the 3 optimization 892 # levels. All three .pyc files must have the same inode (hardlinks). 893 # 894 # If deduplication is disabled, all pyc files must have different 895 # inodes. 
        for dedup in (True, False):
            with tempfile.TemporaryDirectory() as path:
                with self.subTest(dedup=dedup):
                    script = script_helper.make_script(path, "script", "a = 0")
                    pycs = get_pycs(script)

                    args = ["-q", "-o 0", "-o 1", "-o 2"]
                    if dedup:
                        args.append("--hardlink-dupes")
                    self.assertRunOK(path, *args)

                    self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup)
                    self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup)
                    self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup)


class CommandLineTestsWithSourceEpoch(CommandLineTestsBase,
                                      unittest.TestCase,
                                      metaclass=SourceDateEpochTestMeta,
                                      source_date_epoch=True):
    pass


class CommandLineTestsNoSourceEpoch(CommandLineTestsBase,
                                    unittest.TestCase,
                                    metaclass=SourceDateEpochTestMeta,
                                    source_date_epoch=False):
    pass


class HardlinkDedupTestsBase:
    # Test hardlink_dupes parameter of compileall.compile_dir()

    def setUp(self):
        self.path = None

    @contextlib.contextmanager
    def temporary_directory(self):
        with tempfile.TemporaryDirectory() as path:
            self.path = path
            yield path
            self.path = None

    def make_script(self, code, name="script"):
        return script_helper.make_script(self.path, name, code)

    def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
        compileall.compile_dir(self.path, quiet=True, optimize=optimize,
                               hardlink_dupes=dedup, force=force)

    def test_bad_args(self):
        # Bad arguments combination, hardlink deduplication makes sense
        # only for more than one optimization level
        with self.temporary_directory():
            self.make_script("pass")
            with self.assertRaises(ValueError):
                compileall.compile_dir(self.path, quiet=True, optimize=0,
                                       hardlink_dupes=True)
            with self.assertRaises(ValueError):
                # same optimization level specified twice:
                # compile_dir() removes duplicates
                compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
                                       hardlink_dupes=True)

    def create_code(self, docstring=False, assertion=False):
        lines = []
        if docstring:
            lines.append("'module docstring'")
        lines.append('x = 1')
        if assertion:
            lines.append("assert x == 1")
        return '\n'.join(lines)

    def iter_codes(self):
        for docstring in (False, True):
            for assertion in (False, True):
                code = self.create_code(docstring=docstring, assertion=assertion)
                yield (code, docstring, assertion)

    def test_disabled(self):
        # Deduplication disabled, no hardlinks
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    pycs = get_pycs(script)
                    self.compile_dir(dedup=False)
                    self.assertFalse(is_hardlink(pycs[0], pycs[1]))
                    self.assertFalse(is_hardlink(pycs[0], pycs[2]))
                    self.assertFalse(is_hardlink(pycs[1], pycs[2]))

    def check_hardlinks(self, script, docstring=False, assertion=False):
        pycs = get_pycs(script)
        self.assertEqual(is_hardlink(pycs[0], pycs[1]),
                         not assertion)
        self.assertEqual(is_hardlink(pycs[0], pycs[2]),
                         not assertion and not docstring)
        self.assertEqual(is_hardlink(pycs[1], pycs[2]),
                         not docstring)

    def test_hardlink(self):
        # Test deduplication on all combinations
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
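                    # compile_dir() writes the opt-0/1/2 pycs; check_hardlinks()
                    # then verifies which pairs share an inode.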
                    script = self.make_script(code)
                    self.compile_dir()
                    self.check_hardlinks(script, docstring, assertion)

    def test_only_two_levels(self):
        # Don't build the 3 optimization levels, but only 2
        for opts in ((0, 1), (1, 2), (0, 2)):
            with self.subTest(opts=opts):
                with self.temporary_directory():
                    # code with no docstring and no assertion:
                    # same bytecode for all optimization levels
                    script = self.make_script(self.create_code())
                    self.compile_dir(optimize=opts)
                    pyc1 = get_pyc(script, opts[0])
                    pyc2 = get_pyc(script, opts[1])
                    self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_duplicated_levels(self):
        # compile_dir() must not fail if optimize contains duplicated
        # optimization levels and/or if optimization levels are not sorted.
        with self.temporary_directory():
            # code with no docstring and no assertion:
            # same bytecode for all optimization levels
            script = self.make_script(self.create_code())
            self.compile_dir(optimize=[1, 0, 1, 0])
            pyc1 = get_pyc(script, 0)
            pyc2 = get_pyc(script, 1)
            self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_recompilation(self):
        # Test compile_dir() when pyc files already exist and the script
        # content changed
        with self.temporary_directory():
            script = self.make_script("a = 0")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)

            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino

            # Change of the module content
            script = self.make_script("print(0)")

            # Recompilation without -o 1
            self.compile_dir(optimize=[0, 2], force=True)

            # opt-1.pyc should have the same inode as before and others should not
            self.assertEqual(inode, os.stat(pycs[1]).st_ino)
            self.assertTrue(is_hardlink(pycs[0], pycs[2]))
            self.assertNotEqual(inode, os.stat(pycs[2]).st_ino)
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))

    def test_import(self):
        # Test that import updates a single pyc file when pyc files already
        # exist and the script content changed
        with self.temporary_directory():
            script = self.make_script(self.create_code(), name="module")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)

            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino

            # Change of the module content
            script = self.make_script("print(0)", name="module")

            # Import the module in Python with -O (optimization level 1)
            script_helper.assert_python_ok(
                "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path
            )

            # Only opt-1.pyc is changed
            self.assertEqual(inode, os.stat(pycs[0]).st_ino)
            self.assertEqual(inode, os.stat(pycs[2]).st_ino)
            self.assertFalse(is_hardlink(pycs[1], pycs[2]))
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))


class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=True):
    pass


class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase,
                                      unittest.TestCase,
                                      metaclass=SourceDateEpochTestMeta,
                                      source_date_epoch=False):
    pass


if __name__ == "__main__":
    unittest.main()