import compileall
import contextlib
import filecmp
import importlib.util
import io
import os
import py_compile
import shutil
import struct
import sys
import tempfile
import test.test_importlib.util
import time
import unittest

from unittest import mock, skipUnless
try:
    # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
    # and it can function.
    from multiprocessing.util import _cleanup_tests as multiprocessing_cleanup_tests
    from concurrent.futures import ProcessPoolExecutor
    from concurrent.futures.process import _check_system_limits
    _check_system_limits()
    _have_multiprocessing = True
except (NotImplementedError, ModuleNotFoundError):
    _have_multiprocessing = False

from test import support
from test.support import os_helper
from test.support import script_helper
from test.test_py_compile import without_source_date_epoch
from test.test_py_compile import SourceDateEpochTestMeta
from test.support.os_helper import FakePath


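# Note: importlib.util.cache_from_source() maps a source path to its
# PEP 3147/PEP 488 cache file.  Assuming a CPython 3.12 interpreter, for
# example, get_pyc('/tmp/_test.py', 2) would return something like
# '/tmp/__pycache__/_test.cpython-312.opt-2.pyc', while an empty optimization
# tag drops the ".opt-N" suffix entirely.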
def get_pyc(script, opt):
    if not opt:
        # Replace None and 0 with ''
        opt = ''
    return importlib.util.cache_from_source(script, optimization=opt)


def get_pycs(script):
    return [get_pyc(script, opt) for opt in (0, 1, 2)]


def is_hardlink(filename1, filename2):
    """Returns True if two files have the same inode (hardlink)"""
    inode1 = os.stat(filename1).st_ino
    inode2 = os.stat(filename2).st_ino
    return inode1 == inode2


class CompileallTestsBase:

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.directory)

        self.source_path = os.path.join(self.directory, '_test.py')
        self.bc_path = importlib.util.cache_from_source(self.source_path)
        with open(self.source_path, 'w', encoding="utf-8") as file:
            file.write('x = 123\n')
        self.source_path2 = os.path.join(self.directory, '_test2.py')
        self.bc_path2 = importlib.util.cache_from_source(self.source_path2)
        shutil.copyfile(self.source_path, self.source_path2)
        self.subdirectory = os.path.join(self.directory, '_subdir')
        os.mkdir(self.subdirectory)
        self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
        shutil.copyfile(self.source_path, self.source_path3)

    def add_bad_source_file(self):
        self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
        with open(self.bad_source_path, 'w', encoding="utf-8") as file:
            file.write('x (\n')

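    # Note: per PEP 552, a timestamp-based .pyc starts with a 16-byte header
    # consisting of the 4-byte magic number, a 4-byte flags word (0 for
    # timestamp-based pycs), the 4-byte source mtime and the 4-byte source
    # size.  timestamp_metadata() below reads and rebuilds only the first 12
    # bytes, i.e. everything except the size field, roughly:
    #
    #     magic, flags, mtime = struct.unpack('<4sLL', header[:12])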
    def timestamp_metadata(self):
        with open(self.bc_path, 'rb') as file:
            data = file.read(12)
        mtime = int(os.stat(self.source_path).st_mtime)
        compare = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 0,
                              mtime & 0xFFFF_FFFF)
        return data, compare

    def test_year_2038_mtime_compilation(self):
        # Test to make sure we can handle mtimes larger than what a 32-bit
        # signed number can hold as part of bpo-34990
        try:
            os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support timestamps near 2**32")
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertTrue(compileall.compile_file(self.source_path))

    def test_larger_than_32_bit_times(self):
        # This is similar to the test above but we skip it if the OS doesn't
        # support modification times larger than 32-bits.
        try:
            os.utime(self.source_path, (2**35, 2**35))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support large timestamps")
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertTrue(compileall.compile_file(self.source_path))

    def recreation_check(self, metadata):
        """Check that compileall recreates bytecode when the new metadata is
        used."""
        if os.environ.get('SOURCE_DATE_EPOCH'):
            raise unittest.SkipTest('SOURCE_DATE_EPOCH is set')
        py_compile.compile(self.source_path)
        self.assertEqual(*self.timestamp_metadata())
        with open(self.bc_path, 'rb') as file:
            bc = file.read()[len(metadata):]
        with open(self.bc_path, 'wb') as file:
            file.write(metadata)
            file.write(bc)
        self.assertNotEqual(*self.timestamp_metadata())
        compileall.compile_dir(self.directory, force=False, quiet=True)
        self.assertEqual(*self.timestamp_metadata())

    def test_mtime(self):
        # Test a change in mtime leads to a new .pyc.
        self.recreation_check(struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
                                          0, 1))

    def test_magic_number(self):
        # Test a change in the magic number leads to a new .pyc.
        self.recreation_check(b'\0\0\0\0')

    def test_compile_files(self):
        # Test compiling a single file, and complete directory
        for fn in (self.bc_path, self.bc_path2):
            try:
                os.unlink(fn)
            except OSError:
                pass
        self.assertTrue(compileall.compile_file(self.source_path,
                                                force=False, quiet=True))
        self.assertTrue(os.path.isfile(self.bc_path) and
                        not os.path.isfile(self.bc_path2))
        os.unlink(self.bc_path)
        self.assertTrue(compileall.compile_dir(self.directory, force=False,
                                               quiet=True))
        self.assertTrue(os.path.isfile(self.bc_path) and
                        os.path.isfile(self.bc_path2))
        os.unlink(self.bc_path)
        os.unlink(self.bc_path2)
        # Test against bad files
        self.add_bad_source_file()
        self.assertFalse(compileall.compile_file(self.bad_source_path,
                                                 force=False, quiet=2))
        self.assertFalse(compileall.compile_dir(self.directory,
                                                force=False, quiet=2))

    def test_compile_file_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        # we should also test the output
        with support.captured_stdout() as stdout:
            self.assertTrue(compileall.compile_file(FakePath(self.source_path)))
        self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_ddir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(FakePath(self.source_path),
                                                ddir=FakePath('ddir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_stripdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(FakePath(self.source_path),
                                                stripdir=FakePath('stripdir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_file_pathlike_prependdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_file(FakePath(self.source_path),
                                                prependdir=FakePath('prependdir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_path(self):
        with test.test_importlib.util.import_state(path=[self.directory]):
            self.assertTrue(compileall.compile_path(quiet=2))

        with test.test_importlib.util.import_state(path=[self.directory]):
            self.add_bad_source_file()
            self.assertFalse(compileall.compile_path(skip_curdir=False,
                                                     force=True, quiet=2))

    def test_no_pycache_in_non_package(self):
        # Bug 8563 reported that __pycache__ directories got created by
        # compile_file() for non-.py files.
        data_dir = os.path.join(self.directory, 'data')
        data_file = os.path.join(data_dir, 'file')
        os.mkdir(data_dir)
        # touch data/file
        with open(data_file, 'wb'):
            pass
        compileall.compile_file(data_file)
        self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))


    def test_compile_file_encoding_fallback(self):
        # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
        self.add_bad_source_file()
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertFalse(compileall.compile_file(self.bad_source_path))


    def test_optimize(self):
        # make sure compiling with different optimization settings than the
        # interpreter's creates the correct file names
        optimize, opt = (1, 1) if __debug__ else (0, '')
        compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
        cached = importlib.util.cache_from_source(self.source_path,
                                                  optimization=opt)
        self.assertTrue(os.path.isfile(cached))
        cached2 = importlib.util.cache_from_source(self.source_path2,
                                                   optimization=opt)
        self.assertTrue(os.path.isfile(cached2))
        cached3 = importlib.util.cache_from_source(self.source_path3,
                                                   optimization=opt)
        self.assertTrue(os.path.isfile(cached3))

    def test_compile_dir_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        with support.captured_stdout() as stdout:
            compileall.compile_dir(FakePath(self.directory))
        line = stdout.getvalue().splitlines()[0]
        self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_dir_pathlike_stripdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_dir(FakePath(self.directory),
                                               stripdir=FakePath('stripdir_path'),
                                               quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_dir_pathlike_prependdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_dir(FakePath(self.directory),
                                               prependdir=FakePath('prependdir_path'),
                                               quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_pool_called(self, pool_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(pool_mock.called)

    def test_compile_workers_non_positive(self):
        with self.assertRaisesRegex(ValueError,
                                    "workers must be greater or equal to 0"):
            compileall.compile_dir(self.directory, workers=-1)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_workers_cpu_count(self, pool_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=0)
        self.assertEqual(pool_mock.call_args[1]['max_workers'], None)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    @mock.patch('compileall.compile_file')
    def test_compile_one_worker(self, compile_file_mock, pool_mock):
        compileall.compile_dir(self.directory, quiet=True)
        self.assertFalse(pool_mock.called)
        self.assertTrue(compile_file_mock.called)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None)
    @mock.patch('compileall.compile_file')
    def test_compile_missing_multiprocessing(self, compile_file_mock):
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(compile_file_mock.called)

    def test_compile_dir_maxlevels(self):
        # Test the actual impact of maxlevels parameter
        depth = 3
        path = self.directory
        for i in range(1, depth + 1):
            path = os.path.join(path, f"dir_{i}")
            source = os.path.join(path, 'script.py')
            os.mkdir(path)
            shutil.copyfile(self.source_path, source)
        pyc_filename = importlib.util.cache_from_source(source)

        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth - 1)
        self.assertFalse(os.path.isfile(pyc_filename))

        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth)
        self.assertTrue(os.path.isfile(pyc_filename))

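    # Note: ddir (the CLI's -d option) overrides the directory recorded in
    # each compiled code object's co_filename, which is the path tracebacks
    # later display.  The helper below checks that the recorded path is ddir
    # joined with each module's package-relative path, not just the bare
    # filename (bpo-39769).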
    def _test_ddir_only(self, *, ddir, parallel=True):
        """Recursive compile_dir ddir must contain package paths; bpo39769."""
        fullpath = ["test", "foo"]
        path = self.directory
        mods = []
        for subdir in fullpath:
            path = os.path.join(path, subdir)
            os.mkdir(path)
            script_helper.make_script(path, "__init__", "")
            mods.append(script_helper.make_script(path, "mod",
                                                  "def fn(): 1/0\nfn()\n"))

        if parallel:
            self.addCleanup(multiprocessing_cleanup_tests)
        compileall.compile_dir(
                self.directory, quiet=True, ddir=ddir,
                workers=2 if parallel else 1)

        self.assertTrue(mods)
        for mod in mods:
            self.assertTrue(mod.startswith(self.directory), mod)
            modcode = importlib.util.cache_from_source(mod)
            modpath = mod[len(self.directory+os.sep):]
            _, _, err = script_helper.assert_python_failure(modcode)
            expected_in = os.path.join(ddir, modpath)
            mod_code_obj = test.test_importlib.util.get_code_from_pyc(modcode)
            self.assertEqual(mod_code_obj.co_filename, expected_in)
            self.assertIn(f'"{expected_in}"', os.fsdecode(err))

    def test_ddir_only_one_worker(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=False)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_ddir_multiple_workers(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=True)

    def test_ddir_empty_only_one_worker(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=False)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_ddir_empty_multiple_workers(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=True)

    def test_strip_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        compileall.compile_dir(path, quiet=True, stripdir=stripdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(*fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_only_invalid(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *(fullpath[:2] + ['fake']))
        with support.captured_stdout() as out:
            compileall.compile_dir(path, quiet=True, stripdir=stripdir)
        self.assertIn("not a valid prefix", out.getvalue())
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_not_in = os.path.join(self.directory, *fullpath[2:])
        self.assertIn(
            path,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            expected_not_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_prepend_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, self.directory, *fullpath)
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True,
                               stripdir=stripdir, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_prepend_and_ddir(self):
        fullpath = ["test", "build", "real", "path", "ddir"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script_helper.make_script(path, "test", "1 / 0")
        with self.assertRaises(ValueError):
            compileall.compile_dir(path, quiet=True, ddir="/bar",
                                   stripdir="/foo", prependdir="/bar")

    def test_multiple_optimization_levels(self):
        script = script_helper.make_script(self.directory,
                                           "test_optimization",
                                           "a = 0")
        bc = []
        for opt_level in "", 1, 2, 3:
            bc.append(importlib.util.cache_from_source(script,
                                                       optimization=opt_level))
        test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]]
        for opt_combination in test_combinations:
            compileall.compile_file(script, quiet=True,
                                    optimize=opt_combination)
            for opt_level in opt_combination:
                self.assertTrue(os.path.isfile(bc[opt_level]))
                try:
                    os.unlink(bc[opt_level])
                except Exception:
                    pass

    @os_helper.skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))


class CompileallTestsWithSourceEpoch(CompileallTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=True):
    pass


class CompileallTestsWithoutSourceEpoch(CompileallTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=False):
    pass


# WASI does not have a temp directory and uses cwd instead. The cwd contains
# non-ASCII chars, so _walk_dir() fails to encode self.directory.
@unittest.skipIf(support.is_wasi, "tempdir is not encodable on WASI")
class EncodingTest(unittest.TestCase):
    """Issue 6716: compileall should escape source code when printing errors
    to stdout."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        with open(self.source_path, 'w', encoding='utf-8') as file:
            # Intentional syntax error: bytes can only contain
            # ASCII literal characters.
            file.write('b"\u20ac"')

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_error(self):
        buffer = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
        with contextlib.redirect_stdout(buffer):
            compiled = compileall.compile_dir(self.directory)
        self.assertFalse(compiled)  # should not be successful
        buffer.seek(0)
        res = buffer.read()
        self.assertIn(
            'SyntaxError: bytes can only contain ASCII literal characters',
            res,
        )
        self.assertNotIn('UnicodeEncodeError', res)


class CommandLineTestsBase:
    """Test compileall's CLI."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.addCleanup(os_helper.rmtree, self.directory)
        self.pkgdir = os.path.join(self.directory, 'foo')
        os.mkdir(self.pkgdir)
        self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__')
        # Create the __init__.py and a package module.
        self.initfn = script_helper.make_script(self.pkgdir, '__init__', '')
        self.barfn = script_helper.make_script(self.pkgdir, 'bar', '')

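    # Note: sys.pycache_prefix (settable externally via PYTHONPYCACHEPREFIX or
    # -X pycache_prefix) redirects bytecode caches into one parallel tree
    # instead of per-package __pycache__ directories.  The CLI tests point it
    # at a throwaway directory so the compileall subprocesses never write into
    # the interpreter's real cache layout.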
    @contextlib.contextmanager
    def temporary_pycache_prefix(self):
        """Adjust and restore sys.pycache_prefix."""
        old_prefix = sys.pycache_prefix
        new_prefix = os.path.join(self.directory, '__testcache__')
        try:
            sys.pycache_prefix = new_prefix
            yield {
                'PYTHONPATH': self.directory,
                'PYTHONPYCACHEPREFIX': new_prefix,
            }
        finally:
            sys.pycache_prefix = old_prefix

    def _get_run_args(self, args):
        return [*support.optim_args_from_interpreter_flags(),
                '-S', '-m', 'compileall',
                *args]

    def assertRunOK(self, *args, **env_vars):
        rc, out, err = script_helper.assert_python_ok(
                         *self._get_run_args(args), **env_vars,
                         PYTHONIOENCODING='utf-8')
        self.assertEqual(b'', err)
        return out

    def assertRunNotOK(self, *args, **env_vars):
        rc, out, err = script_helper.assert_python_failure(
                        *self._get_run_args(args), **env_vars,
                        PYTHONIOENCODING='utf-8')
        return rc, out, err

    def assertCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertTrue(os.path.exists(path))

    def assertNotCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertFalse(os.path.exists(path))

    def test_no_args_compiles_path(self):
        # Note that -l is implied for the no args case.
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            self.assertCompiled(bazfn)
            self.assertNotCompiled(self.initfn)
            self.assertNotCompiled(self.barfn)

    @without_source_date_epoch  # timestamp invalidation test
    @support.requires_resource('cpu')
    def test_no_args_respects_force_flag(self):
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            pycpath = importlib.util.cache_from_source(bazfn)
        # Set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # Without force, no recompilation
        self.assertRunOK(**env)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # Now force it.
        self.assertRunOK('-f', **env)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    @support.requires_resource('cpu')
    def test_no_args_respects_quiet_flag(self):
        script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            noisy = self.assertRunOK(**env)
        self.assertIn(b'Listing ', noisy)
        quiet = self.assertRunOK('-q', **env)
        self.assertNotIn(b'Listing ', quiet)

    # Ensure that the default behavior of compileall's CLI is to create
    # PEP 3147/PEP 488 pyc files.
    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'opt-1.pyc', ['-O']),
        ('doubleoptimize', 'opt-2.pyc', ['-OO']),
    ]:
        def f(self, ext=ext, switch=switch):
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall', '-q', self.pkgdir]))
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        locals()['test_pep3147_paths_' + name] = f

    def test_legacy_paths(self):
        # Ensure that with the proper switch, compileall leaves legacy
        # pyc files, and no __pycache__ directory.
        self.assertRunOK('-b', '-q', self.pkgdir)
        # Verify that no __pycache__ directory was created.
        self.assertFalse(os.path.exists(self.pkgdir_cachedir))
        expected = sorted(['__init__.py', '__init__.pyc', 'bar.py',
                           'bar.pyc'])
        self.assertEqual(sorted(os.listdir(self.pkgdir)), expected)

    def test_multiple_runs(self):
        # Bug 8527 reported that multiple calls produced empty
        # __pycache__/__pycache__ directories.
        self.assertRunOK('-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__')
        self.assertFalse(os.path.exists(cachecachedir))
        # Call compileall again.
        self.assertRunOK('-q', self.pkgdir)
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        self.assertFalse(os.path.exists(cachecachedir))

    @without_source_date_epoch  # timestamp invalidation test
    def test_force(self):
        self.assertRunOK('-q', self.pkgdir)
        pycpath = importlib.util.cache_from_source(self.barfn)
        # set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # without force, no recompilation
        self.assertRunOK('-q', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # now force it.
        self.assertRunOK('-q', '-f', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    def test_recursion_control(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        os.mkdir(subpackage)
        subinitfn = script_helper.make_script(subpackage, '__init__', '')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        self.assertRunOK('-q', '-l', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__')))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)

    def test_recursion_limit(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        subpackage2 = os.path.join(subpackage, 'ham')
        subpackage3 = os.path.join(subpackage2, 'eggs')
        for pkg in (subpackage, subpackage2, subpackage3):
            script_helper.make_pkg(pkg)

        subinitfn = os.path.join(subpackage, '__init__.py')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        spamfn = script_helper.make_script(subpackage2, 'spam', '')
        eggfn = script_helper.make_script(subpackage3, 'egg', '')

        self.assertRunOK('-q', '-r 0', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(
            os.path.exists(os.path.join(subpackage, '__pycache__')))

        self.assertRunOK('-q', '-r 1', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertNotCompiled(spamfn)

        self.assertRunOK('-q', '-r 2', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertNotCompiled(eggfn)

        self.assertRunOK('-q', '-r 5', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertCompiled(eggfn)

    @os_helper.skip_unless_symlink
    def test_symlink_loop(self):
        # Currently, compileall ignores symlinks to directories.
        # If that limitation is ever lifted, it should protect against
        # recursion in symlink loops.
        pkg = os.path.join(self.pkgdir, 'spam')
        script_helper.make_pkg(pkg)
        os.symlink('.', os.path.join(pkg, 'evil'))
        os.symlink('.', os.path.join(pkg, 'evil2'))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(os.path.join(
            self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py'
        ))

    def test_quiet(self):
        noisy = self.assertRunOK(self.pkgdir)
        quiet = self.assertRunOK('-q', self.pkgdir)
        self.assertNotEqual(b'', noisy)
        self.assertEqual(b'', quiet)

    def test_silent(self):
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir)
        _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir)
        self.assertNotEqual(b'', quiet)
        self.assertEqual(b'', silent)

    def test_regexp(self):
        self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir)
        self.assertNotCompiled(self.barfn)
        self.assertCompiled(self.initfn)

    def test_multiple_dirs(self):
        pkgdir2 = os.path.join(self.directory, 'foo2')
        os.mkdir(pkgdir2)
        init2fn = script_helper.make_script(pkgdir2, '__init__', '')
        bar2fn = script_helper.make_script(pkgdir2, 'bar2', '')
        self.assertRunOK('-q', self.pkgdir, pkgdir2)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)
        self.assertCompiled(init2fn)
        self.assertCompiled(bar2fn)

    def test_d_compile_error(self):
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
        self.assertRegex(out, b'File "dinsdale')

    def test_d_runtime_error(self):
        bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
        self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
        fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz')
        pyc = importlib.util.cache_from_source(bazfn)
        os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc'))
        os.remove(bazfn)
        rc, out, err = script_helper.assert_python_failure(fn, __isolated=False)
        self.assertRegex(err, b'File "dinsdale')

    def test_include_bad_file(self):
        rc, out, err = self.assertRunNotOK(
            '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir)
        self.assertRegex(out, b'rror.*nosuchfile')
        self.assertNotRegex(err, b'Traceback')
        self.assertFalse(os.path.exists(importlib.util.cache_from_source(
                                            self.pkgdir_cachedir)))

    def test_include_file_with_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep)
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4)
        self.assertCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertCompiled(f4)

    def test_include_file_no_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'))
        self.assertNotCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertNotCompiled(f4)

    def test_include_on_stdin(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-']))
        p.stdin.write((f3+os.linesep).encode('ascii'))
        script_helper.kill_python(p)
        self.assertNotCompiled(f1)
        self.assertNotCompiled(f2)
        self.assertCompiled(f3)
        self.assertNotCompiled(f4)

    def test_compiles_as_much_as_possible(self):
        bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error')
        rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn,
                                           bingfn, self.barfn)
        self.assertRegex(out, b'rror')
        self.assertNotCompiled(bingfn)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)

    def test_invalid_arg_produces_message(self):
        out = self.assertRunOK('badfilename')
        self.assertRegex(out, b"Can't list 'badfilename'")

    def test_pyc_invalidation_mode(self):
        script_helper.make_script(self.pkgdir, 'f1', '')
        pyc = importlib.util.cache_from_source(
            os.path.join(self.pkgdir, 'f1.py'))
        self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
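        # Note: per PEP 552, in the flags word at bytes 4:8 of the header,
        # bit 0 marks the pyc as hash-based and bit 1 asks the loader to
        # re-validate against the source, so a checked-hash pyc stores 0b11
        # and an unchecked-hash pyc stores 0b01.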
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
        self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_workers(self):
        bar2fn = script_helper.make_script(self.directory, 'bar2', '')
        files = []
        for suffix in range(5):
            pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix))
            os.mkdir(pkgdir)
            fn = script_helper.make_script(pkgdir, '__init__', '')
            files.append(script_helper.make_script(pkgdir, 'bar2', ''))

        self.assertRunOK(self.directory, '-j', '0')
        self.assertCompiled(bar2fn)
        for file in files:
            self.assertCompiled(file)

    @mock.patch('compileall.compile_dir')
    def test_workers_available_cores(self, compile_dir):
        with mock.patch("sys.argv",
                        new=[sys.executable, self.directory, "-j0"]):
            compileall.main()
            self.assertTrue(compile_dir.called)
            self.assertEqual(compile_dir.call_args[-1]['workers'], 0)

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        prependdir = "/foo"
        self.assertRunOK("-s", stripdir, "-p", prependdir, path)
        rc, out, err = script_helper.assert_python_failure(bc)
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_multiple_optimization_levels(self):
        path = os.path.join(self.directory, "optimizations")
        os.makedirs(path)
        script = script_helper.make_script(path,
                                           "test_optimization",
                                           "a = 0")
        bc = []
        for opt_level in "", 1, 2, 3:
            bc.append(importlib.util.cache_from_source(script,
                                                       optimization=opt_level))
        test_combinations = [["0", "1"],
                             ["1", "2"],
                             ["0", "2"],
                             ["0", "1", "2"]]
        for opt_combination in test_combinations:
            self.assertRunOK(path, *("-o" + str(n) for n in opt_combination))
            for opt_level in opt_combination:
                self.assertTrue(os.path.isfile(bc[int(opt_level)]))
                try:
                    os.unlink(bc[int(opt_level)])
                except Exception:
                    pass

    @os_helper.skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        self.assertRunOK(symlinks_path, "-e", allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))

    def test_hardlink_bad_args(self):
        # Bad arguments combination: hardlink deduplication makes sense
        # only for more than one optimization level.
        self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes")

    def test_hardlink(self):
        # 'a = 0' code produces the same bytecode for the 3 optimization
        # levels. All three .pyc files must have the same inode (hardlinks).
        #
        # If deduplication is disabled, all pyc files must have different
        # inodes.
        for dedup in (True, False):
            with tempfile.TemporaryDirectory() as path:
                with self.subTest(dedup=dedup):
                    script = script_helper.make_script(path, "script", "a = 0")
                    pycs = get_pycs(script)

                    args = ["-q", "-o 0", "-o 1", "-o 2"]
                    if dedup:
                        args.append("--hardlink-dupes")
                    self.assertRunOK(path, *args)

                    self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup)
                    self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup)
                    self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup)


class CommandLineTestsWithSourceEpoch(CommandLineTestsBase,
                                       unittest.TestCase,
                                       metaclass=SourceDateEpochTestMeta,
                                       source_date_epoch=True):
    pass


class CommandLineTestsNoSourceEpoch(CommandLineTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=False):
    pass



@os_helper.skip_unless_hardlink
class HardlinkDedupTestsBase:
    # Test hardlink_dupes parameter of compileall.compile_dir()
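    #
    # Note: when several optimization levels yield byte-for-byte identical
    # .pyc files, hardlink_dupes=True (the CLI's --hardlink-dupes) makes
    # compile_dir() hardlink the duplicates together instead of writing
    # separate copies; the tests below verify this via inode comparison.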

    def setUp(self):
        self.path = None

    @contextlib.contextmanager
    def temporary_directory(self):
        with tempfile.TemporaryDirectory() as path:
            self.path = path
            yield path
            self.path = None

    def make_script(self, code, name="script"):
        return script_helper.make_script(self.path, name, code)

    def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
        compileall.compile_dir(self.path, quiet=True, optimize=optimize,
                               hardlink_dupes=dedup, force=force)

    def test_bad_args(self):
        # Bad arguments combination: hardlink deduplication makes sense
        # only for more than one optimization level.
        with self.temporary_directory():
            self.make_script("pass")
            with self.assertRaises(ValueError):
                compileall.compile_dir(self.path, quiet=True, optimize=0,
                                       hardlink_dupes=True)
            with self.assertRaises(ValueError):
                # same optimization level specified twice:
                # compile_dir() removes duplicates
                compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
                                       hardlink_dupes=True)

    def create_code(self, docstring=False, assertion=False):
        lines = []
        if docstring:
            lines.append("'module docstring'")
        lines.append('x = 1')
        if assertion:
            lines.append("assert x == 1")
        return '\n'.join(lines)

    def iter_codes(self):
        for docstring in (False, True):
            for assertion in (False, True):
                code = self.create_code(docstring=docstring, assertion=assertion)
                yield (code, docstring, assertion)

    def test_disabled(self):
        # Deduplication disabled, no hardlinks
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    pycs = get_pycs(script)
                    self.compile_dir(dedup=False)
                    self.assertFalse(is_hardlink(pycs[0], pycs[1]))
                    self.assertFalse(is_hardlink(pycs[0], pycs[2]))
                    self.assertFalse(is_hardlink(pycs[1], pycs[2]))

    def check_hardlinks(self, script, docstring=False, assertion=False):
        pycs = get_pycs(script)
        self.assertEqual(is_hardlink(pycs[0], pycs[1]),
                         not assertion)
        self.assertEqual(is_hardlink(pycs[0], pycs[2]),
                         not assertion and not docstring)
        self.assertEqual(is_hardlink(pycs[1], pycs[2]),
                         not docstring)

    def test_hardlink(self):
        # Test deduplication on all combinations
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    self.compile_dir()
                    self.check_hardlinks(script, docstring, assertion)

    def test_only_two_levels(self):
        # Don't build the 3 optimization levels, but only 2
        for opts in ((0, 1), (1, 2), (0, 2)):
            with self.subTest(opts=opts):
                with self.temporary_directory():
                    # code with no docstring and no assertion:
                    # same bytecode for all optimization levels
                    script = self.make_script(self.create_code())
                    self.compile_dir(optimize=opts)
                    pyc1 = get_pyc(script, opts[0])
                    pyc2 = get_pyc(script, opts[1])
                    self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_duplicated_levels(self):
        # compile_dir() must not fail if optimize contains duplicated
        # optimization levels and/or if optimization levels are not sorted.
        with self.temporary_directory():
            # code with no docstring and no assertion:
            # same bytecode for all optimization levels
            script = self.make_script(self.create_code())
            self.compile_dir(optimize=[1, 0, 1, 0])
            pyc1 = get_pyc(script, 0)
            pyc2 = get_pyc(script, 1)
            self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_recompilation(self):
        # Test compile_dir() when pyc files already exist and the script
        # content has changed
        with self.temporary_directory():
            script = self.make_script("a = 0")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)

            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino

            # Change of the module content
            script = self.make_script("print(0)")

            # Recompilation without -o 1
            self.compile_dir(optimize=[0, 2], force=True)

            # opt-1.pyc should have the same inode as before and others should not
            self.assertEqual(inode, os.stat(pycs[1]).st_ino)
            self.assertTrue(is_hardlink(pycs[0], pycs[2]))
            self.assertNotEqual(inode, os.stat(pycs[2]).st_ino)
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))

    def test_import(self):
        # Test that import updates a single pyc file when pyc files already
        # exist and the script content has changed
        with self.temporary_directory():
            script = self.make_script(self.create_code(), name="module")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)

            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino

            # Change of the module content
            script = self.make_script("print(0)", name="module")

            # Import the module in Python with -O (optimization level 1)
            script_helper.assert_python_ok(
                "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path
            )

            # Only opt-1.pyc is changed
            self.assertEqual(inode, os.stat(pycs[0]).st_ino)
            self.assertEqual(inode, os.stat(pycs[2]).st_ino)
            self.assertFalse(is_hardlink(pycs[1], pycs[2]))
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))


class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=True):
    pass


class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase,
                                      unittest.TestCase,
                                      metaclass=SourceDateEpochTestMeta,
                                      source_date_epoch=False):
    pass


if __name__ == "__main__":
    unittest.main()