import io
import time
import unittest
import tokenize
from functools import partial
from threading import Thread

from test.support import threading_helper


@threading_helper.requires_working_threading()
class TestTokenize(unittest.TestCase):
    def test_tokenizer_iter(self):
        # Share a single C tokenizer iterator (exposed through the tokenize
        # module) between several threads and check that, between them, the
        # threads collect every token exactly once.
        source = io.StringIO("for _ in a:\n  pass")
        it = tokenize._tokenize.TokenizerIter(source.readline, extra_tokens=False)

        tokens = []
        def next_token(it):
            # Drain the shared iterator until it is exhausted; the short
            # sleep widens the window for the threads to interleave.
            while True:
                try:
                    r = next(it)
                    tokens.append(tokenize.TokenInfo._make(r))
                    time.sleep(0.03)
                except StopIteration:
                    return

        # Five threads race to consume the same shared iterator.
        threads = []
        for _ in range(5):
            threads.append(Thread(target=partial(next_token, it)))

        for thread in threads:
            thread.start()

        for thread in threads:
            thread.join()

        # Expected token stream; the numeric type codes follow the token
        # module (1=NAME, 4=NEWLINE, 5=INDENT, 6=DEDENT, 11=COLON, 0=ENDMARKER).
        expected_tokens = [
            tokenize.TokenInfo(type=1, string='for', start=(1, 0), end=(1, 3), line='for _ in a:\n'),
            tokenize.TokenInfo(type=1, string='_', start=(1, 4), end=(1, 5), line='for _ in a:\n'),
            tokenize.TokenInfo(type=1, string='in', start=(1, 6), end=(1, 8), line='for _ in a:\n'),
            tokenize.TokenInfo(type=1, string='a', start=(1, 9), end=(1, 10), line='for _ in a:\n'),
            tokenize.TokenInfo(type=11, string=':', start=(1, 10), end=(1, 11), line='for _ in a:\n'),
            tokenize.TokenInfo(type=4, string='', start=(1, 11), end=(1, 11), line='for _ in a:\n'),
            tokenize.TokenInfo(type=5, string='', start=(2, -1), end=(2, -1), line='  pass'),
            tokenize.TokenInfo(type=1, string='pass', start=(2, 2), end=(2, 6), line='  pass'),
            tokenize.TokenInfo(type=4, string='', start=(2, 6), end=(2, 6), line='  pass'),
            tokenize.TokenInfo(type=6, string='', start=(2, -1), end=(2, -1), line='  pass'),
            tokenize.TokenInfo(type=0, string='', start=(2, -1), end=(2, -1), line='  pass'),
        ]

        # Thread scheduling makes the collection order nondeterministic,
        # so sort both lists before comparing.
        tokens.sort()
        expected_tokens.sort()
        self.assertListEqual(tokens, expected_tokens)


if __name__ == "__main__":
    unittest.main()