# This is a variant of the very old (early 90's) file
# Demo/threads/bug.py.  It simply provokes a number of threads into
# trying to import the same module "at the same time".
# There are no pleasant failure modes -- the most likely is that Python
# complains several times about module random having no attribute
# randrange, and then Python hangs.

import _imp as imp
import os
import importlib
import sys
import time
import shutil
import threading
import unittest
from unittest import mock
from test.support import (
    verbose, import_module, run_unittest, TESTFN, reap_threads,
    forget, unlink, rmtree, start_threads)

def task(N, done, done_tasks, errors):
    try:
        # We don't use modulefinder but still import it in order to stress
        # importing of different modules from several threads.
        if len(done_tasks) % 2:
            import modulefinder
            import random
        else:
            import random
            import modulefinder
        # This will fail if random is not completely initialized
        x = random.randrange(1, 3)
    except Exception as e:
        errors.append(e.with_traceback(None))
    finally:
        done_tasks.append(threading.get_ident())
        finished = len(done_tasks) == N
        if finished:
            done.set()

def mock_register_at_fork(func):
    # bpo-30599: Mock os.register_at_fork() when importing the random module,
    # since this function does not allow unregistering callbacks and would
    # leak memory.
    return mock.patch('os.register_at_fork', create=True)(func)

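# Minimal illustrative sketch (not exercised by the tests; the helper name is
# hypothetical): applying mock.patch() manually, as mock_register_at_fork()
# does above, behaves like stacking it as a decorator -- os.register_at_fork
# is replaced by a MagicMock for the duration of the call, and that mock is
# passed to the wrapped function as an extra positional argument.
def _demo_mock_register_at_fork():
    @mock.patch('os.register_at_fork', create=True)
    def probe(mock_os):
        # Inside the patched scope the module attribute *is* the mock, so any
        # fork callbacks registered during an import are never really kept.
        return mock_os is os.register_at_fork
    return probe()
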
# Create a circular import structure: A -> C -> B -> D -> A
# NOTE: `time` is already loaded and therefore doesn't threaten to deadlock.

circular_imports_modules = {
    'A': """if 1:
        import time
        time.sleep(%(delay)s)
        x = 'a'
        import C
        """,
    'B': """if 1:
        import time
        time.sleep(%(delay)s)
        x = 'b'
        import D
        """,
    'C': """import B""",
    'D': """import A""",
}

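# Illustrative sketch (not used by the tests; the helper below is hypothetical):
# following the first project-local import in each snippet above reproduces the
# cycle documented in the comment, A -> C -> B -> D -> A.
def _demo_circular_import_cycle():
    edges = {'A': 'C', 'C': 'B', 'B': 'D', 'D': 'A'}   # "key imports value"
    order, node = [], 'A'
    while node not in order:
        order.append(node)
        node = edges[node]
    return order + [node]    # ['A', 'C', 'B', 'D', 'A'] -- the import cycle
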
class Finder:
    """A dummy finder to detect concurrent access to its find_spec()
    method."""

    def __init__(self):
        self.numcalls = 0
        self.x = 0
        self.lock = threading.Lock()

    def find_spec(self, name, path=None, target=None):
        # Simulate some thread-unsafe behaviour. If calls to find_spec()
        # are properly serialized, `x` will end up the same as `numcalls`.
        # Otherwise not.
        assert imp.lock_held()
        with self.lock:
            self.numcalls += 1
        x = self.x
        time.sleep(0.01)
        self.x = x + 1

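# Standalone sketch (not part of the test suite; all names are hypothetical) of
# the lost-update race that Finder.find_spec() simulates above: each worker
# reads a shared counter, sleeps, then writes back the stale value plus one.
# Without the serialization the import machinery provides, concurrent workers
# overwrite each other and the final value falls short of the number of calls.
def _demo_lost_update_race(nthreads=8):
    state = {'x': 0}

    def worker():
        x = state['x']          # read
        time.sleep(0.01)        # lose the CPU while holding a stale value
        state['x'] = x + 1      # write back, possibly clobbering other updates

    threads = [threading.Thread(target=worker) for _ in range(nthreads)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    # With all workers racing, state['x'] usually ends up well below nthreads.
    return state['x'], nthreads
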
class FlushingFinder:
    """A dummy finder which flushes sys.path_importer_cache when it gets
    called."""

    def find_spec(self, name, path=None, target=None):
        sys.path_importer_cache.clear()

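# Illustrative sketch (not run by the tests; the hook and the module name are
# hypothetical): sys.path_importer_cache maps each sys.path entry to the finder
# chosen for it, so once populated the hooks in sys.path_hooks are no longer
# consulted.  Clearing the cache, as FlushingFinder does, forces every path
# entry to be offered to the hooks again on the next path-based import.
def _demo_path_hook_requery():
    import importlib.machinery
    consulted = []

    def counting_hook(path):
        consulted.append(path)
        raise ImportError       # decline, so the normal finders still apply

    sys.path_hooks.insert(0, counting_hook)
    try:
        sys.path_importer_cache.clear()
        importlib.machinery.PathFinder.find_spec('definitely_missing_module')
    finally:
        sys.path_hooks.remove(counting_hook)
        sys.path_importer_cache.clear()
    return len(consulted)       # one call per (uncached) sys.path entry
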

class ThreadedImportTests(unittest.TestCase):

    def setUp(self):
        self.old_random = sys.modules.pop('random', None)

    def tearDown(self):
        # If the `random` module was already initialized, we restore the
        # old module at the end so that pickling tests don't fail.
        # See http://bugs.python.org/issue3657#msg110461
        if self.old_random is not None:
            sys.modules['random'] = self.old_random

    @mock_register_at_fork
    def check_parallel_module_init(self, mock_os):
        if imp.lock_held():
            # This triggers on, e.g., from test import autotest.
            raise unittest.SkipTest("can't run when import lock is held")

        done = threading.Event()
        for N in (20, 50) * 3:
            if verbose:
                print("Trying", N, "threads ...", end=' ')
            # Make sure that random and modulefinder get reimported freshly
            for modname in ['random', 'modulefinder']:
                try:
                    del sys.modules[modname]
                except KeyError:
                    pass
            errors = []
            done_tasks = []
            done.clear()
            t0 = time.monotonic()
            with start_threads(threading.Thread(target=task,
                                                args=(N, done, done_tasks, errors,))
                               for i in range(N)):
                pass
            completed = done.wait(10 * 60)
            dt = time.monotonic() - t0
            if verbose:
                print("%.1f ms" % (dt*1e3), flush=True, end=" ")
            dbg_info = 'done: %s/%s' % (len(done_tasks), N)
            self.assertFalse(errors, dbg_info)
            self.assertTrue(completed, dbg_info)
            if verbose:
                print("OK.")

    def test_parallel_module_init(self):
        self.check_parallel_module_init()

    def test_parallel_meta_path(self):
        finder = Finder()
        sys.meta_path.insert(0, finder)
        try:
            self.check_parallel_module_init()
            self.assertGreater(finder.numcalls, 0)
            self.assertEqual(finder.x, finder.numcalls)
        finally:
            sys.meta_path.remove(finder)

    def test_parallel_path_hooks(self):
        # Here the Finder instance is only used to check concurrent calls
        # to path_hook().
        finder = Finder()
        # In order for our path hook to be called at each import, we need
        # to flush the path_importer_cache, which we do by registering a
        # dedicated meta_path entry.
        flushing_finder = FlushingFinder()
        def path_hook(path):
            finder.find_spec('')
            raise ImportError
        sys.path_hooks.insert(0, path_hook)
        sys.meta_path.append(flushing_finder)
        try:
            # Flush the cache once up front
            flushing_finder.find_spec('')
            self.check_parallel_module_init()
            self.assertGreater(finder.numcalls, 0)
            self.assertEqual(finder.x, finder.numcalls)
        finally:
            sys.meta_path.remove(flushing_finder)
            sys.path_hooks.remove(path_hook)

    def test_import_hangers(self):
        # In case this test is run again, make sure the helper module
        # gets loaded from scratch again.
        try:
            del sys.modules['test.threaded_import_hangers']
        except KeyError:
            pass
        import test.threaded_import_hangers
        self.assertFalse(test.threaded_import_hangers.errors)

    def test_circular_imports(self):
        # The goal of this test is to exercise implementations of the import
        # lock which use a per-module lock, rather than a global lock.
        # In these implementations, there is a possible deadlock with
        # circular imports, for example:
        # - thread 1 imports A (grabbing the lock for A) which imports B
        # - thread 2 imports B (grabbing the lock for B) which imports A
        # Such implementations should be able to detect such situations and
        # resolve them one way or the other, without freezing.
        # (A standalone sketch of this lock-ordering deadlock follows the
        # test class below.)
        # NOTE: our test constructs a slightly less trivial import cycle,
        # in order to better stress the deadlock avoidance mechanism.
        delay = 0.5
        os.mkdir(TESTFN)
        self.addCleanup(shutil.rmtree, TESTFN)
        sys.path.insert(0, TESTFN)
        self.addCleanup(sys.path.remove, TESTFN)
        for name, contents in circular_imports_modules.items():
            contents = contents % {'delay': delay}
            with open(os.path.join(TESTFN, name + ".py"), "wb") as f:
                f.write(contents.encode('utf-8'))
            self.addCleanup(forget, name)

        importlib.invalidate_caches()
        results = []
        def import_ab():
            import A
            results.append(getattr(A, 'x', None))
        def import_ba():
            import B
            results.append(getattr(B, 'x', None))
        t1 = threading.Thread(target=import_ab)
        t2 = threading.Thread(target=import_ba)
        t1.start()
        t2.start()
        t1.join()
        t2.join()
        self.assertEqual(set(results), {'a', 'b'})

    @mock_register_at_fork
    def test_side_effect_import(self, mock_os):
        code = """if 1:
            import threading
            def target():
                import random
            t = threading.Thread(target=target)
            t.start()
            t.join()
            t = None"""
        sys.path.insert(0, os.curdir)
        self.addCleanup(sys.path.remove, os.curdir)
        filename = TESTFN + ".py"
        with open(filename, "wb") as f:
            f.write(code.encode('utf-8'))
        self.addCleanup(unlink, filename)
        self.addCleanup(forget, TESTFN)
        self.addCleanup(rmtree, '__pycache__')
        importlib.invalidate_caches()
        __import__(TESTFN)
        del sys.modules[TESTFN]

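# Standalone sketch (never called by the tests; all names below are
# hypothetical) of the lock-ordering deadlock described in
# test_circular_imports: two "per-module" locks are taken in opposite orders by
# two threads.  The acquire() timeouts stand in for the real deadlock detection
# in the import machinery, which must notice the cycle and resolve it instead
# of letting both threads block forever.
def _demo_per_module_lock_deadlock():
    lock_a, lock_b = threading.Lock(), threading.Lock()
    outcomes = []

    def importer(first, second, label):
        with first:                     # grab the lock of the module we import
            time.sleep(0.1)             # let the other thread grab its own lock
            # The imported module turns around and imports the other one.
            acquired = second.acquire(timeout=0.5)
            outcomes.append((label, acquired))
            if acquired:
                second.release()

    t1 = threading.Thread(target=importer, args=(lock_a, lock_b, 'A then B'))
    t2 = threading.Thread(target=importer, args=(lock_b, lock_a, 'B then A'))
    t1.start()
    t2.start()
    t1.join()
    t2.join()
    return outcomes    # typically both acquires fail: the classic deadlock shape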

@reap_threads
def test_main():
    old_switchinterval = None
    try:
        old_switchinterval = sys.getswitchinterval()
        sys.setswitchinterval(1e-5)
    except AttributeError:
        pass
    try:
        run_unittest(ThreadedImportTests)
    finally:
        if old_switchinterval is not None:
            sys.setswitchinterval(old_switchinterval)

if __name__ == "__main__":
    test_main()