1import compileall 2import contextlib 3import filecmp 4import importlib.util 5import io 6import itertools 7import os 8import pathlib 9import py_compile 10import shutil 11import struct 12import sys 13import tempfile 14import test.test_importlib.util 15import time 16import unittest 17 18from unittest import mock, skipUnless 19try: 20 from concurrent.futures import ProcessPoolExecutor 21 _have_multiprocessing = True 22except ImportError: 23 _have_multiprocessing = False 24 25from test import support 26from test.support import script_helper 27 28from .test_py_compile import without_source_date_epoch 29from .test_py_compile import SourceDateEpochTestMeta 30 31 32def get_pyc(script, opt): 33 if not opt: 34 # Replace None and 0 with '' 35 opt = '' 36 return importlib.util.cache_from_source(script, optimization=opt) 37 38 39def get_pycs(script): 40 return [get_pyc(script, opt) for opt in (0, 1, 2)] 41 42 43def is_hardlink(filename1, filename2): 44 """Returns True if two files have the same inode (hardlink)""" 45 inode1 = os.stat(filename1).st_ino 46 inode2 = os.stat(filename2).st_ino 47 return inode1 == inode2 48 49 50class CompileallTestsBase: 51 52 def setUp(self): 53 self.directory = tempfile.mkdtemp() 54 self.source_path = os.path.join(self.directory, '_test.py') 55 self.bc_path = importlib.util.cache_from_source(self.source_path) 56 with open(self.source_path, 'w') as file: 57 file.write('x = 123\n') 58 self.source_path2 = os.path.join(self.directory, '_test2.py') 59 self.bc_path2 = importlib.util.cache_from_source(self.source_path2) 60 shutil.copyfile(self.source_path, self.source_path2) 61 self.subdirectory = os.path.join(self.directory, '_subdir') 62 os.mkdir(self.subdirectory) 63 self.source_path3 = os.path.join(self.subdirectory, '_test3.py') 64 shutil.copyfile(self.source_path, self.source_path3) 65 66 def tearDown(self): 67 shutil.rmtree(self.directory) 68 69 def add_bad_source_file(self): 70 self.bad_source_path = os.path.join(self.directory, '_test_bad.py') 71 with open(self.bad_source_path, 'w') as file: 72 file.write('x (\n') 73 74 def timestamp_metadata(self): 75 with open(self.bc_path, 'rb') as file: 76 data = file.read(12) 77 mtime = int(os.stat(self.source_path).st_mtime) 78 compare = struct.pack('<4sll', importlib.util.MAGIC_NUMBER, 0, mtime) 79 return data, compare 80 81 def recreation_check(self, metadata): 82 """Check that compileall recreates bytecode when the new metadata is 83 used.""" 84 if os.environ.get('SOURCE_DATE_EPOCH'): 85 raise unittest.SkipTest('SOURCE_DATE_EPOCH is set') 86 py_compile.compile(self.source_path) 87 self.assertEqual(*self.timestamp_metadata()) 88 with open(self.bc_path, 'rb') as file: 89 bc = file.read()[len(metadata):] 90 with open(self.bc_path, 'wb') as file: 91 file.write(metadata) 92 file.write(bc) 93 self.assertNotEqual(*self.timestamp_metadata()) 94 compileall.compile_dir(self.directory, force=False, quiet=True) 95 self.assertTrue(*self.timestamp_metadata()) 96 97 def test_mtime(self): 98 # Test a change in mtime leads to a new .pyc. 99 self.recreation_check(struct.pack('<4sll', importlib.util.MAGIC_NUMBER, 100 0, 1)) 101 102 def test_magic_number(self): 103 # Test a change in mtime leads to a new .pyc. 104 self.recreation_check(b'\0\0\0\0') 105 106 def test_compile_files(self): 107 # Test compiling a single file, and complete directory 108 for fn in (self.bc_path, self.bc_path2): 109 try: 110 os.unlink(fn) 111 except: 112 pass 113 self.assertTrue(compileall.compile_file(self.source_path, 114 force=False, quiet=True)) 115 self.assertTrue(os.path.isfile(self.bc_path) and 116 not os.path.isfile(self.bc_path2)) 117 os.unlink(self.bc_path) 118 self.assertTrue(compileall.compile_dir(self.directory, force=False, 119 quiet=True)) 120 self.assertTrue(os.path.isfile(self.bc_path) and 121 os.path.isfile(self.bc_path2)) 122 os.unlink(self.bc_path) 123 os.unlink(self.bc_path2) 124 # Test against bad files 125 self.add_bad_source_file() 126 self.assertFalse(compileall.compile_file(self.bad_source_path, 127 force=False, quiet=2)) 128 self.assertFalse(compileall.compile_dir(self.directory, 129 force=False, quiet=2)) 130 131 def test_compile_file_pathlike(self): 132 self.assertFalse(os.path.isfile(self.bc_path)) 133 # we should also test the output 134 with support.captured_stdout() as stdout: 135 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path))) 136 self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)') 137 self.assertTrue(os.path.isfile(self.bc_path)) 138 139 def test_compile_file_pathlike_ddir(self): 140 self.assertFalse(os.path.isfile(self.bc_path)) 141 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path), 142 ddir=pathlib.Path('ddir_path'), 143 quiet=2)) 144 self.assertTrue(os.path.isfile(self.bc_path)) 145 146 def test_compile_path(self): 147 with test.test_importlib.util.import_state(path=[self.directory]): 148 self.assertTrue(compileall.compile_path(quiet=2)) 149 150 with test.test_importlib.util.import_state(path=[self.directory]): 151 self.add_bad_source_file() 152 self.assertFalse(compileall.compile_path(skip_curdir=False, 153 force=True, quiet=2)) 154 155 def test_no_pycache_in_non_package(self): 156 # Bug 8563 reported that __pycache__ directories got created by 157 # compile_file() for non-.py files. 158 data_dir = os.path.join(self.directory, 'data') 159 data_file = os.path.join(data_dir, 'file') 160 os.mkdir(data_dir) 161 # touch data/file 162 with open(data_file, 'w'): 163 pass 164 compileall.compile_file(data_file) 165 self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__'))) 166 167 def test_optimize(self): 168 # make sure compiling with different optimization settings than the 169 # interpreter's creates the correct file names 170 optimize, opt = (1, 1) if __debug__ else (0, '') 171 compileall.compile_dir(self.directory, quiet=True, optimize=optimize) 172 cached = importlib.util.cache_from_source(self.source_path, 173 optimization=opt) 174 self.assertTrue(os.path.isfile(cached)) 175 cached2 = importlib.util.cache_from_source(self.source_path2, 176 optimization=opt) 177 self.assertTrue(os.path.isfile(cached2)) 178 cached3 = importlib.util.cache_from_source(self.source_path3, 179 optimization=opt) 180 self.assertTrue(os.path.isfile(cached3)) 181 182 def test_compile_dir_pathlike(self): 183 self.assertFalse(os.path.isfile(self.bc_path)) 184 with support.captured_stdout() as stdout: 185 compileall.compile_dir(pathlib.Path(self.directory)) 186 line = stdout.getvalue().splitlines()[0] 187 self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)') 188 self.assertTrue(os.path.isfile(self.bc_path)) 189 190 @mock.patch('concurrent.futures.ProcessPoolExecutor') 191 def test_compile_pool_called(self, pool_mock): 192 compileall.compile_dir(self.directory, quiet=True, workers=5) 193 self.assertTrue(pool_mock.called) 194 195 def test_compile_workers_non_positive(self): 196 with self.assertRaisesRegex(ValueError, 197 "workers must be greater or equal to 0"): 198 compileall.compile_dir(self.directory, workers=-1) 199 200 @mock.patch('concurrent.futures.ProcessPoolExecutor') 201 def test_compile_workers_cpu_count(self, pool_mock): 202 compileall.compile_dir(self.directory, quiet=True, workers=0) 203 self.assertEqual(pool_mock.call_args[1]['max_workers'], None) 204 205 @mock.patch('concurrent.futures.ProcessPoolExecutor') 206 @mock.patch('compileall.compile_file') 207 def test_compile_one_worker(self, compile_file_mock, pool_mock): 208 compileall.compile_dir(self.directory, quiet=True) 209 self.assertFalse(pool_mock.called) 210 self.assertTrue(compile_file_mock.called) 211 212 @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None) 213 @mock.patch('compileall.compile_file') 214 def test_compile_missing_multiprocessing(self, compile_file_mock): 215 compileall.compile_dir(self.directory, quiet=True, workers=5) 216 self.assertTrue(compile_file_mock.called) 217 218 def test_compile_dir_maxlevels(self): 219 # Test the actual impact of maxlevels parameter 220 depth = 3 221 path = self.directory 222 for i in range(1, depth + 1): 223 path = os.path.join(path, f"dir_{i}") 224 source = os.path.join(path, 'script.py') 225 os.mkdir(path) 226 shutil.copyfile(self.source_path, source) 227 pyc_filename = importlib.util.cache_from_source(source) 228 229 compileall.compile_dir(self.directory, quiet=True, maxlevels=depth - 1) 230 self.assertFalse(os.path.isfile(pyc_filename)) 231 232 compileall.compile_dir(self.directory, quiet=True, maxlevels=depth) 233 self.assertTrue(os.path.isfile(pyc_filename)) 234 235 def _test_ddir_only(self, *, ddir, parallel=True): 236 """Recursive compile_dir ddir must contain package paths; bpo39769.""" 237 fullpath = ["test", "foo"] 238 path = self.directory 239 mods = [] 240 for subdir in fullpath: 241 path = os.path.join(path, subdir) 242 os.mkdir(path) 243 script_helper.make_script(path, "__init__", "") 244 mods.append(script_helper.make_script(path, "mod", 245 "def fn(): 1/0\nfn()\n")) 246 compileall.compile_dir( 247 self.directory, quiet=True, ddir=ddir, 248 workers=2 if parallel else 1) 249 self.assertTrue(mods) 250 for mod in mods: 251 self.assertTrue(mod.startswith(self.directory), mod) 252 modcode = importlib.util.cache_from_source(mod) 253 modpath = mod[len(self.directory+os.sep):] 254 _, _, err = script_helper.assert_python_failure(modcode) 255 expected_in = os.path.join(ddir, modpath) 256 mod_code_obj = test.test_importlib.util.get_code_from_pyc(modcode) 257 self.assertEqual(mod_code_obj.co_filename, expected_in) 258 self.assertIn(f'"{expected_in}"', os.fsdecode(err)) 259 260 def test_ddir_only_one_worker(self): 261 """Recursive compile_dir ddir= contains package paths; bpo39769.""" 262 return self._test_ddir_only(ddir="<a prefix>", parallel=False) 263 264 def test_ddir_multiple_workers(self): 265 """Recursive compile_dir ddir= contains package paths; bpo39769.""" 266 return self._test_ddir_only(ddir="<a prefix>", parallel=True) 267 268 def test_ddir_empty_only_one_worker(self): 269 """Recursive compile_dir ddir='' contains package paths; bpo39769.""" 270 return self._test_ddir_only(ddir="", parallel=False) 271 272 def test_ddir_empty_multiple_workers(self): 273 """Recursive compile_dir ddir='' contains package paths; bpo39769.""" 274 return self._test_ddir_only(ddir="", parallel=True) 275 276 def test_strip_only(self): 277 fullpath = ["test", "build", "real", "path"] 278 path = os.path.join(self.directory, *fullpath) 279 os.makedirs(path) 280 script = script_helper.make_script(path, "test", "1 / 0") 281 bc = importlib.util.cache_from_source(script) 282 stripdir = os.path.join(self.directory, *fullpath[:2]) 283 compileall.compile_dir(path, quiet=True, stripdir=stripdir) 284 rc, out, err = script_helper.assert_python_failure(bc) 285 expected_in = os.path.join(*fullpath[2:]) 286 self.assertIn( 287 expected_in, 288 str(err, encoding=sys.getdefaultencoding()) 289 ) 290 self.assertNotIn( 291 stripdir, 292 str(err, encoding=sys.getdefaultencoding()) 293 ) 294 295 def test_prepend_only(self): 296 fullpath = ["test", "build", "real", "path"] 297 path = os.path.join(self.directory, *fullpath) 298 os.makedirs(path) 299 script = script_helper.make_script(path, "test", "1 / 0") 300 bc = importlib.util.cache_from_source(script) 301 prependdir = "/foo" 302 compileall.compile_dir(path, quiet=True, prependdir=prependdir) 303 rc, out, err = script_helper.assert_python_failure(bc) 304 expected_in = os.path.join(prependdir, self.directory, *fullpath) 305 self.assertIn( 306 expected_in, 307 str(err, encoding=sys.getdefaultencoding()) 308 ) 309 310 def test_strip_and_prepend(self): 311 fullpath = ["test", "build", "real", "path"] 312 path = os.path.join(self.directory, *fullpath) 313 os.makedirs(path) 314 script = script_helper.make_script(path, "test", "1 / 0") 315 bc = importlib.util.cache_from_source(script) 316 stripdir = os.path.join(self.directory, *fullpath[:2]) 317 prependdir = "/foo" 318 compileall.compile_dir(path, quiet=True, 319 stripdir=stripdir, prependdir=prependdir) 320 rc, out, err = script_helper.assert_python_failure(bc) 321 expected_in = os.path.join(prependdir, *fullpath[2:]) 322 self.assertIn( 323 expected_in, 324 str(err, encoding=sys.getdefaultencoding()) 325 ) 326 self.assertNotIn( 327 stripdir, 328 str(err, encoding=sys.getdefaultencoding()) 329 ) 330 331 def test_strip_prepend_and_ddir(self): 332 fullpath = ["test", "build", "real", "path", "ddir"] 333 path = os.path.join(self.directory, *fullpath) 334 os.makedirs(path) 335 script_helper.make_script(path, "test", "1 / 0") 336 with self.assertRaises(ValueError): 337 compileall.compile_dir(path, quiet=True, ddir="/bar", 338 stripdir="/foo", prependdir="/bar") 339 340 def test_multiple_optimization_levels(self): 341 script = script_helper.make_script(self.directory, 342 "test_optimization", 343 "a = 0") 344 bc = [] 345 for opt_level in "", 1, 2, 3: 346 bc.append(importlib.util.cache_from_source(script, 347 optimization=opt_level)) 348 test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]] 349 for opt_combination in test_combinations: 350 compileall.compile_file(script, quiet=True, 351 optimize=opt_combination) 352 for opt_level in opt_combination: 353 self.assertTrue(os.path.isfile(bc[opt_level])) 354 try: 355 os.unlink(bc[opt_level]) 356 except Exception: 357 pass 358 359 @support.skip_unless_symlink 360 def test_ignore_symlink_destination(self): 361 # Create folders for allowed files, symlinks and prohibited area 362 allowed_path = os.path.join(self.directory, "test", "dir", "allowed") 363 symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks") 364 prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited") 365 os.makedirs(allowed_path) 366 os.makedirs(symlinks_path) 367 os.makedirs(prohibited_path) 368 369 # Create scripts and symlinks and remember their byte-compiled versions 370 allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0") 371 prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0") 372 allowed_symlink = os.path.join(symlinks_path, "test_allowed.py") 373 prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py") 374 os.symlink(allowed_script, allowed_symlink) 375 os.symlink(prohibited_script, prohibited_symlink) 376 allowed_bc = importlib.util.cache_from_source(allowed_symlink) 377 prohibited_bc = importlib.util.cache_from_source(prohibited_symlink) 378 379 compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path) 380 381 self.assertTrue(os.path.isfile(allowed_bc)) 382 self.assertFalse(os.path.isfile(prohibited_bc)) 383 384 385class CompileallTestsWithSourceEpoch(CompileallTestsBase, 386 unittest.TestCase, 387 metaclass=SourceDateEpochTestMeta, 388 source_date_epoch=True): 389 pass 390 391 392class CompileallTestsWithoutSourceEpoch(CompileallTestsBase, 393 unittest.TestCase, 394 metaclass=SourceDateEpochTestMeta, 395 source_date_epoch=False): 396 pass 397 398 399class EncodingTest(unittest.TestCase): 400 """Issue 6716: compileall should escape source code when printing errors 401 to stdout.""" 402 403 def setUp(self): 404 self.directory = tempfile.mkdtemp() 405 self.source_path = os.path.join(self.directory, '_test.py') 406 with open(self.source_path, 'w', encoding='utf-8') as file: 407 file.write('# -*- coding: utf-8 -*-\n') 408 file.write('print u"\u20ac"\n') 409 410 def tearDown(self): 411 shutil.rmtree(self.directory) 412 413 def test_error(self): 414 try: 415 orig_stdout = sys.stdout 416 sys.stdout = io.TextIOWrapper(io.BytesIO(),encoding='ascii') 417 compileall.compile_dir(self.directory) 418 finally: 419 sys.stdout = orig_stdout 420 421 422class CommandLineTestsBase: 423 """Test compileall's CLI.""" 424 425 @classmethod 426 def setUpClass(cls): 427 for path in filter(os.path.isdir, sys.path): 428 directory_created = False 429 directory = pathlib.Path(path) / '__pycache__' 430 path = directory / 'test.try' 431 try: 432 if not directory.is_dir(): 433 directory.mkdir() 434 directory_created = True 435 with path.open('w') as file: 436 file.write('# for test_compileall') 437 except OSError: 438 sys_path_writable = False 439 break 440 finally: 441 support.unlink(str(path)) 442 if directory_created: 443 directory.rmdir() 444 else: 445 sys_path_writable = True 446 cls._sys_path_writable = sys_path_writable 447 448 def _skip_if_sys_path_not_writable(self): 449 if not self._sys_path_writable: 450 raise unittest.SkipTest('not all entries on sys.path are writable') 451 452 def _get_run_args(self, args): 453 return [*support.optim_args_from_interpreter_flags(), 454 '-S', '-m', 'compileall', 455 *args] 456 457 def assertRunOK(self, *args, **env_vars): 458 rc, out, err = script_helper.assert_python_ok( 459 *self._get_run_args(args), **env_vars, 460 PYTHONIOENCODING='utf-8') 461 self.assertEqual(b'', err) 462 return out 463 464 def assertRunNotOK(self, *args, **env_vars): 465 rc, out, err = script_helper.assert_python_failure( 466 *self._get_run_args(args), **env_vars, 467 PYTHONIOENCODING='utf-8') 468 return rc, out, err 469 470 def assertCompiled(self, fn): 471 path = importlib.util.cache_from_source(fn) 472 self.assertTrue(os.path.exists(path)) 473 474 def assertNotCompiled(self, fn): 475 path = importlib.util.cache_from_source(fn) 476 self.assertFalse(os.path.exists(path)) 477 478 def setUp(self): 479 self.directory = tempfile.mkdtemp() 480 self.addCleanup(support.rmtree, self.directory) 481 self.pkgdir = os.path.join(self.directory, 'foo') 482 os.mkdir(self.pkgdir) 483 self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__') 484 # Create the __init__.py and a package module. 485 self.initfn = script_helper.make_script(self.pkgdir, '__init__', '') 486 self.barfn = script_helper.make_script(self.pkgdir, 'bar', '') 487 488 def test_no_args_compiles_path(self): 489 # Note that -l is implied for the no args case. 490 self._skip_if_sys_path_not_writable() 491 bazfn = script_helper.make_script(self.directory, 'baz', '') 492 self.assertRunOK(PYTHONPATH=self.directory) 493 self.assertCompiled(bazfn) 494 self.assertNotCompiled(self.initfn) 495 self.assertNotCompiled(self.barfn) 496 497 @without_source_date_epoch # timestamp invalidation test 498 def test_no_args_respects_force_flag(self): 499 self._skip_if_sys_path_not_writable() 500 bazfn = script_helper.make_script(self.directory, 'baz', '') 501 self.assertRunOK(PYTHONPATH=self.directory) 502 pycpath = importlib.util.cache_from_source(bazfn) 503 # Set atime/mtime backward to avoid file timestamp resolution issues 504 os.utime(pycpath, (time.time()-60,)*2) 505 mtime = os.stat(pycpath).st_mtime 506 # Without force, no recompilation 507 self.assertRunOK(PYTHONPATH=self.directory) 508 mtime2 = os.stat(pycpath).st_mtime 509 self.assertEqual(mtime, mtime2) 510 # Now force it. 511 self.assertRunOK('-f', PYTHONPATH=self.directory) 512 mtime2 = os.stat(pycpath).st_mtime 513 self.assertNotEqual(mtime, mtime2) 514 515 def test_no_args_respects_quiet_flag(self): 516 self._skip_if_sys_path_not_writable() 517 script_helper.make_script(self.directory, 'baz', '') 518 noisy = self.assertRunOK(PYTHONPATH=self.directory) 519 self.assertIn(b'Listing ', noisy) 520 quiet = self.assertRunOK('-q', PYTHONPATH=self.directory) 521 self.assertNotIn(b'Listing ', quiet) 522 523 # Ensure that the default behavior of compileall's CLI is to create 524 # PEP 3147/PEP 488 pyc files. 525 for name, ext, switch in [ 526 ('normal', 'pyc', []), 527 ('optimize', 'opt-1.pyc', ['-O']), 528 ('doubleoptimize', 'opt-2.pyc', ['-OO']), 529 ]: 530 def f(self, ext=ext, switch=switch): 531 script_helper.assert_python_ok(*(switch + 532 ['-m', 'compileall', '-q', self.pkgdir])) 533 # Verify the __pycache__ directory contents. 534 self.assertTrue(os.path.exists(self.pkgdir_cachedir)) 535 expected = sorted(base.format(sys.implementation.cache_tag, ext) 536 for base in ('__init__.{}.{}', 'bar.{}.{}')) 537 self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected) 538 # Make sure there are no .pyc files in the source directory. 539 self.assertFalse([fn for fn in os.listdir(self.pkgdir) 540 if fn.endswith(ext)]) 541 locals()['test_pep3147_paths_' + name] = f 542 543 def test_legacy_paths(self): 544 # Ensure that with the proper switch, compileall leaves legacy 545 # pyc files, and no __pycache__ directory. 546 self.assertRunOK('-b', '-q', self.pkgdir) 547 # Verify the __pycache__ directory contents. 548 self.assertFalse(os.path.exists(self.pkgdir_cachedir)) 549 expected = sorted(['__init__.py', '__init__.pyc', 'bar.py', 550 'bar.pyc']) 551 self.assertEqual(sorted(os.listdir(self.pkgdir)), expected) 552 553 def test_multiple_runs(self): 554 # Bug 8527 reported that multiple calls produced empty 555 # __pycache__/__pycache__ directories. 556 self.assertRunOK('-q', self.pkgdir) 557 # Verify the __pycache__ directory contents. 558 self.assertTrue(os.path.exists(self.pkgdir_cachedir)) 559 cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__') 560 self.assertFalse(os.path.exists(cachecachedir)) 561 # Call compileall again. 562 self.assertRunOK('-q', self.pkgdir) 563 self.assertTrue(os.path.exists(self.pkgdir_cachedir)) 564 self.assertFalse(os.path.exists(cachecachedir)) 565 566 @without_source_date_epoch # timestamp invalidation test 567 def test_force(self): 568 self.assertRunOK('-q', self.pkgdir) 569 pycpath = importlib.util.cache_from_source(self.barfn) 570 # set atime/mtime backward to avoid file timestamp resolution issues 571 os.utime(pycpath, (time.time()-60,)*2) 572 mtime = os.stat(pycpath).st_mtime 573 # without force, no recompilation 574 self.assertRunOK('-q', self.pkgdir) 575 mtime2 = os.stat(pycpath).st_mtime 576 self.assertEqual(mtime, mtime2) 577 # now force it. 578 self.assertRunOK('-q', '-f', self.pkgdir) 579 mtime2 = os.stat(pycpath).st_mtime 580 self.assertNotEqual(mtime, mtime2) 581 582 def test_recursion_control(self): 583 subpackage = os.path.join(self.pkgdir, 'spam') 584 os.mkdir(subpackage) 585 subinitfn = script_helper.make_script(subpackage, '__init__', '') 586 hamfn = script_helper.make_script(subpackage, 'ham', '') 587 self.assertRunOK('-q', '-l', self.pkgdir) 588 self.assertNotCompiled(subinitfn) 589 self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__'))) 590 self.assertRunOK('-q', self.pkgdir) 591 self.assertCompiled(subinitfn) 592 self.assertCompiled(hamfn) 593 594 def test_recursion_limit(self): 595 subpackage = os.path.join(self.pkgdir, 'spam') 596 subpackage2 = os.path.join(subpackage, 'ham') 597 subpackage3 = os.path.join(subpackage2, 'eggs') 598 for pkg in (subpackage, subpackage2, subpackage3): 599 script_helper.make_pkg(pkg) 600 601 subinitfn = os.path.join(subpackage, '__init__.py') 602 hamfn = script_helper.make_script(subpackage, 'ham', '') 603 spamfn = script_helper.make_script(subpackage2, 'spam', '') 604 eggfn = script_helper.make_script(subpackage3, 'egg', '') 605 606 self.assertRunOK('-q', '-r 0', self.pkgdir) 607 self.assertNotCompiled(subinitfn) 608 self.assertFalse( 609 os.path.exists(os.path.join(subpackage, '__pycache__'))) 610 611 self.assertRunOK('-q', '-r 1', self.pkgdir) 612 self.assertCompiled(subinitfn) 613 self.assertCompiled(hamfn) 614 self.assertNotCompiled(spamfn) 615 616 self.assertRunOK('-q', '-r 2', self.pkgdir) 617 self.assertCompiled(subinitfn) 618 self.assertCompiled(hamfn) 619 self.assertCompiled(spamfn) 620 self.assertNotCompiled(eggfn) 621 622 self.assertRunOK('-q', '-r 5', self.pkgdir) 623 self.assertCompiled(subinitfn) 624 self.assertCompiled(hamfn) 625 self.assertCompiled(spamfn) 626 self.assertCompiled(eggfn) 627 628 @support.skip_unless_symlink 629 def test_symlink_loop(self): 630 # Currently, compileall ignores symlinks to directories. 631 # If that limitation is ever lifted, it should protect against 632 # recursion in symlink loops. 633 pkg = os.path.join(self.pkgdir, 'spam') 634 script_helper.make_pkg(pkg) 635 os.symlink('.', os.path.join(pkg, 'evil')) 636 os.symlink('.', os.path.join(pkg, 'evil2')) 637 self.assertRunOK('-q', self.pkgdir) 638 self.assertCompiled(os.path.join( 639 self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py' 640 )) 641 642 def test_quiet(self): 643 noisy = self.assertRunOK(self.pkgdir) 644 quiet = self.assertRunOK('-q', self.pkgdir) 645 self.assertNotEqual(b'', noisy) 646 self.assertEqual(b'', quiet) 647 648 def test_silent(self): 649 script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax') 650 _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir) 651 _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir) 652 self.assertNotEqual(b'', quiet) 653 self.assertEqual(b'', silent) 654 655 def test_regexp(self): 656 self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir) 657 self.assertNotCompiled(self.barfn) 658 self.assertCompiled(self.initfn) 659 660 def test_multiple_dirs(self): 661 pkgdir2 = os.path.join(self.directory, 'foo2') 662 os.mkdir(pkgdir2) 663 init2fn = script_helper.make_script(pkgdir2, '__init__', '') 664 bar2fn = script_helper.make_script(pkgdir2, 'bar2', '') 665 self.assertRunOK('-q', self.pkgdir, pkgdir2) 666 self.assertCompiled(self.initfn) 667 self.assertCompiled(self.barfn) 668 self.assertCompiled(init2fn) 669 self.assertCompiled(bar2fn) 670 671 def test_d_compile_error(self): 672 script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax') 673 rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir) 674 self.assertRegex(out, b'File "dinsdale') 675 676 def test_d_runtime_error(self): 677 bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception') 678 self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir) 679 fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz') 680 pyc = importlib.util.cache_from_source(bazfn) 681 os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc')) 682 os.remove(bazfn) 683 rc, out, err = script_helper.assert_python_failure(fn, __isolated=False) 684 self.assertRegex(err, b'File "dinsdale') 685 686 def test_include_bad_file(self): 687 rc, out, err = self.assertRunNotOK( 688 '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir) 689 self.assertRegex(out, b'rror.*nosuchfile') 690 self.assertNotRegex(err, b'Traceback') 691 self.assertFalse(os.path.exists(importlib.util.cache_from_source( 692 self.pkgdir_cachedir))) 693 694 def test_include_file_with_arg(self): 695 f1 = script_helper.make_script(self.pkgdir, 'f1', '') 696 f2 = script_helper.make_script(self.pkgdir, 'f2', '') 697 f3 = script_helper.make_script(self.pkgdir, 'f3', '') 698 f4 = script_helper.make_script(self.pkgdir, 'f4', '') 699 with open(os.path.join(self.directory, 'l1'), 'w') as l1: 700 l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep) 701 l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep) 702 self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4) 703 self.assertCompiled(f1) 704 self.assertCompiled(f2) 705 self.assertNotCompiled(f3) 706 self.assertCompiled(f4) 707 708 def test_include_file_no_arg(self): 709 f1 = script_helper.make_script(self.pkgdir, 'f1', '') 710 f2 = script_helper.make_script(self.pkgdir, 'f2', '') 711 f3 = script_helper.make_script(self.pkgdir, 'f3', '') 712 f4 = script_helper.make_script(self.pkgdir, 'f4', '') 713 with open(os.path.join(self.directory, 'l1'), 'w') as l1: 714 l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep) 715 self.assertRunOK('-i', os.path.join(self.directory, 'l1')) 716 self.assertNotCompiled(f1) 717 self.assertCompiled(f2) 718 self.assertNotCompiled(f3) 719 self.assertNotCompiled(f4) 720 721 def test_include_on_stdin(self): 722 f1 = script_helper.make_script(self.pkgdir, 'f1', '') 723 f2 = script_helper.make_script(self.pkgdir, 'f2', '') 724 f3 = script_helper.make_script(self.pkgdir, 'f3', '') 725 f4 = script_helper.make_script(self.pkgdir, 'f4', '') 726 p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-'])) 727 p.stdin.write((f3+os.linesep).encode('ascii')) 728 script_helper.kill_python(p) 729 self.assertNotCompiled(f1) 730 self.assertNotCompiled(f2) 731 self.assertCompiled(f3) 732 self.assertNotCompiled(f4) 733 734 def test_compiles_as_much_as_possible(self): 735 bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error') 736 rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn, 737 bingfn, self.barfn) 738 self.assertRegex(out, b'rror') 739 self.assertNotCompiled(bingfn) 740 self.assertCompiled(self.initfn) 741 self.assertCompiled(self.barfn) 742 743 def test_invalid_arg_produces_message(self): 744 out = self.assertRunOK('badfilename') 745 self.assertRegex(out, b"Can't list 'badfilename'") 746 747 def test_pyc_invalidation_mode(self): 748 script_helper.make_script(self.pkgdir, 'f1', '') 749 pyc = importlib.util.cache_from_source( 750 os.path.join(self.pkgdir, 'f1.py')) 751 self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir) 752 with open(pyc, 'rb') as fp: 753 data = fp.read() 754 self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11) 755 self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir) 756 with open(pyc, 'rb') as fp: 757 data = fp.read() 758 self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01) 759 760 @skipUnless(_have_multiprocessing, "requires multiprocessing") 761 def test_workers(self): 762 bar2fn = script_helper.make_script(self.directory, 'bar2', '') 763 files = [] 764 for suffix in range(5): 765 pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix)) 766 os.mkdir(pkgdir) 767 fn = script_helper.make_script(pkgdir, '__init__', '') 768 files.append(script_helper.make_script(pkgdir, 'bar2', '')) 769 770 self.assertRunOK(self.directory, '-j', '0') 771 self.assertCompiled(bar2fn) 772 for file in files: 773 self.assertCompiled(file) 774 775 @mock.patch('compileall.compile_dir') 776 def test_workers_available_cores(self, compile_dir): 777 with mock.patch("sys.argv", 778 new=[sys.executable, self.directory, "-j0"]): 779 compileall.main() 780 self.assertTrue(compile_dir.called) 781 self.assertEqual(compile_dir.call_args[-1]['workers'], 0) 782 783 def test_strip_and_prepend(self): 784 fullpath = ["test", "build", "real", "path"] 785 path = os.path.join(self.directory, *fullpath) 786 os.makedirs(path) 787 script = script_helper.make_script(path, "test", "1 / 0") 788 bc = importlib.util.cache_from_source(script) 789 stripdir = os.path.join(self.directory, *fullpath[:2]) 790 prependdir = "/foo" 791 self.assertRunOK("-s", stripdir, "-p", prependdir, path) 792 rc, out, err = script_helper.assert_python_failure(bc) 793 expected_in = os.path.join(prependdir, *fullpath[2:]) 794 self.assertIn( 795 expected_in, 796 str(err, encoding=sys.getdefaultencoding()) 797 ) 798 self.assertNotIn( 799 stripdir, 800 str(err, encoding=sys.getdefaultencoding()) 801 ) 802 803 def test_multiple_optimization_levels(self): 804 path = os.path.join(self.directory, "optimizations") 805 os.makedirs(path) 806 script = script_helper.make_script(path, 807 "test_optimization", 808 "a = 0") 809 bc = [] 810 for opt_level in "", 1, 2, 3: 811 bc.append(importlib.util.cache_from_source(script, 812 optimization=opt_level)) 813 test_combinations = [["0", "1"], 814 ["1", "2"], 815 ["0", "2"], 816 ["0", "1", "2"]] 817 for opt_combination in test_combinations: 818 self.assertRunOK(path, *("-o" + str(n) for n in opt_combination)) 819 for opt_level in opt_combination: 820 self.assertTrue(os.path.isfile(bc[int(opt_level)])) 821 try: 822 os.unlink(bc[opt_level]) 823 except Exception: 824 pass 825 826 @support.skip_unless_symlink 827 def test_ignore_symlink_destination(self): 828 # Create folders for allowed files, symlinks and prohibited area 829 allowed_path = os.path.join(self.directory, "test", "dir", "allowed") 830 symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks") 831 prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited") 832 os.makedirs(allowed_path) 833 os.makedirs(symlinks_path) 834 os.makedirs(prohibited_path) 835 836 # Create scripts and symlinks and remember their byte-compiled versions 837 allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0") 838 prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0") 839 allowed_symlink = os.path.join(symlinks_path, "test_allowed.py") 840 prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py") 841 os.symlink(allowed_script, allowed_symlink) 842 os.symlink(prohibited_script, prohibited_symlink) 843 allowed_bc = importlib.util.cache_from_source(allowed_symlink) 844 prohibited_bc = importlib.util.cache_from_source(prohibited_symlink) 845 846 self.assertRunOK(symlinks_path, "-e", allowed_path) 847 848 self.assertTrue(os.path.isfile(allowed_bc)) 849 self.assertFalse(os.path.isfile(prohibited_bc)) 850 851 def test_hardlink_bad_args(self): 852 # Bad arguments combination, hardlink deduplication make sense 853 # only for more than one optimization level 854 self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes") 855 856 def test_hardlink(self): 857 # 'a = 0' code produces the same bytecode for the 3 optimization 858 # levels. All three .pyc files must have the same inode (hardlinks). 859 # 860 # If deduplication is disabled, all pyc files must have different 861 # inodes. 862 for dedup in (True, False): 863 with tempfile.TemporaryDirectory() as path: 864 with self.subTest(dedup=dedup): 865 script = script_helper.make_script(path, "script", "a = 0") 866 pycs = get_pycs(script) 867 868 args = ["-q", "-o 0", "-o 1", "-o 2"] 869 if dedup: 870 args.append("--hardlink-dupes") 871 self.assertRunOK(path, *args) 872 873 self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup) 874 self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup) 875 self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup) 876 877 878class CommandLineTestsWithSourceEpoch(CommandLineTestsBase, 879 unittest.TestCase, 880 metaclass=SourceDateEpochTestMeta, 881 source_date_epoch=True): 882 pass 883 884 885class CommandLineTestsNoSourceEpoch(CommandLineTestsBase, 886 unittest.TestCase, 887 metaclass=SourceDateEpochTestMeta, 888 source_date_epoch=False): 889 pass 890 891 892 893class HardlinkDedupTestsBase: 894 # Test hardlink_dupes parameter of compileall.compile_dir() 895 896 def setUp(self): 897 self.path = None 898 899 @contextlib.contextmanager 900 def temporary_directory(self): 901 with tempfile.TemporaryDirectory() as path: 902 self.path = path 903 yield path 904 self.path = None 905 906 def make_script(self, code, name="script"): 907 return script_helper.make_script(self.path, name, code) 908 909 def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False): 910 compileall.compile_dir(self.path, quiet=True, optimize=optimize, 911 hardlink_dupes=dedup, force=force) 912 913 def test_bad_args(self): 914 # Bad arguments combination, hardlink deduplication make sense 915 # only for more than one optimization level 916 with self.temporary_directory(): 917 self.make_script("pass") 918 with self.assertRaises(ValueError): 919 compileall.compile_dir(self.path, quiet=True, optimize=0, 920 hardlink_dupes=True) 921 with self.assertRaises(ValueError): 922 # same optimization level specified twice: 923 # compile_dir() removes duplicates 924 compileall.compile_dir(self.path, quiet=True, optimize=[0, 0], 925 hardlink_dupes=True) 926 927 def create_code(self, docstring=False, assertion=False): 928 lines = [] 929 if docstring: 930 lines.append("'module docstring'") 931 lines.append('x = 1') 932 if assertion: 933 lines.append("assert x == 1") 934 return '\n'.join(lines) 935 936 def iter_codes(self): 937 for docstring in (False, True): 938 for assertion in (False, True): 939 code = self.create_code(docstring=docstring, assertion=assertion) 940 yield (code, docstring, assertion) 941 942 def test_disabled(self): 943 # Deduplication disabled, no hardlinks 944 for code, docstring, assertion in self.iter_codes(): 945 with self.subTest(docstring=docstring, assertion=assertion): 946 with self.temporary_directory(): 947 script = self.make_script(code) 948 pycs = get_pycs(script) 949 self.compile_dir(dedup=False) 950 self.assertFalse(is_hardlink(pycs[0], pycs[1])) 951 self.assertFalse(is_hardlink(pycs[0], pycs[2])) 952 self.assertFalse(is_hardlink(pycs[1], pycs[2])) 953 954 def check_hardlinks(self, script, docstring=False, assertion=False): 955 pycs = get_pycs(script) 956 self.assertEqual(is_hardlink(pycs[0], pycs[1]), 957 not assertion) 958 self.assertEqual(is_hardlink(pycs[0], pycs[2]), 959 not assertion and not docstring) 960 self.assertEqual(is_hardlink(pycs[1], pycs[2]), 961 not docstring) 962 963 def test_hardlink(self): 964 # Test deduplication on all combinations 965 for code, docstring, assertion in self.iter_codes(): 966 with self.subTest(docstring=docstring, assertion=assertion): 967 with self.temporary_directory(): 968 script = self.make_script(code) 969 self.compile_dir() 970 self.check_hardlinks(script, docstring, assertion) 971 972 def test_only_two_levels(self): 973 # Don't build the 3 optimization levels, but only 2 974 for opts in ((0, 1), (1, 2), (0, 2)): 975 with self.subTest(opts=opts): 976 with self.temporary_directory(): 977 # code with no dostring and no assertion: 978 # same bytecode for all optimization levels 979 script = self.make_script(self.create_code()) 980 self.compile_dir(optimize=opts) 981 pyc1 = get_pyc(script, opts[0]) 982 pyc2 = get_pyc(script, opts[1]) 983 self.assertTrue(is_hardlink(pyc1, pyc2)) 984 985 def test_duplicated_levels(self): 986 # compile_dir() must not fail if optimize contains duplicated 987 # optimization levels and/or if optimization levels are not sorted. 988 with self.temporary_directory(): 989 # code with no dostring and no assertion: 990 # same bytecode for all optimization levels 991 script = self.make_script(self.create_code()) 992 self.compile_dir(optimize=[1, 0, 1, 0]) 993 pyc1 = get_pyc(script, 0) 994 pyc2 = get_pyc(script, 1) 995 self.assertTrue(is_hardlink(pyc1, pyc2)) 996 997 def test_recompilation(self): 998 # Test compile_dir() when pyc files already exists and the script 999 # content changed 1000 with self.temporary_directory(): 1001 script = self.make_script("a = 0") 1002 self.compile_dir() 1003 # All three levels have the same inode 1004 self.check_hardlinks(script) 1005 1006 pycs = get_pycs(script) 1007 inode = os.stat(pycs[0]).st_ino 1008 1009 # Change of the module content 1010 script = self.make_script("print(0)") 1011 1012 # Recompilation without -o 1 1013 self.compile_dir(optimize=[0, 2], force=True) 1014 1015 # opt-1.pyc should have the same inode as before and others should not 1016 self.assertEqual(inode, os.stat(pycs[1]).st_ino) 1017 self.assertTrue(is_hardlink(pycs[0], pycs[2])) 1018 self.assertNotEqual(inode, os.stat(pycs[2]).st_ino) 1019 # opt-1.pyc and opt-2.pyc have different content 1020 self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) 1021 1022 def test_import(self): 1023 # Test that import updates a single pyc file when pyc files already 1024 # exists and the script content changed 1025 with self.temporary_directory(): 1026 script = self.make_script(self.create_code(), name="module") 1027 self.compile_dir() 1028 # All three levels have the same inode 1029 self.check_hardlinks(script) 1030 1031 pycs = get_pycs(script) 1032 inode = os.stat(pycs[0]).st_ino 1033 1034 # Change of the module content 1035 script = self.make_script("print(0)", name="module") 1036 1037 # Import the module in Python with -O (optimization level 1) 1038 script_helper.assert_python_ok( 1039 "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path 1040 ) 1041 1042 # Only opt-1.pyc is changed 1043 self.assertEqual(inode, os.stat(pycs[0]).st_ino) 1044 self.assertEqual(inode, os.stat(pycs[2]).st_ino) 1045 self.assertFalse(is_hardlink(pycs[1], pycs[2])) 1046 # opt-1.pyc and opt-2.pyc have different content 1047 self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) 1048 1049 1050class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase, 1051 unittest.TestCase, 1052 metaclass=SourceDateEpochTestMeta, 1053 source_date_epoch=True): 1054 pass 1055 1056 1057class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase, 1058 unittest.TestCase, 1059 metaclass=SourceDateEpochTestMeta, 1060 source_date_epoch=False): 1061 pass 1062 1063 1064if __name__ == "__main__": 1065 unittest.main() 1066