# NOTE: web-viewer navigation chrome ("Home / Line# / Scopes# / Navigate /
# Raw / Download") removed -- it was scraper residue, not part of this file.
1import copy
2import gc
3import pickle
4import sys
5import unittest
6import weakref
7import inspect
8
9from test import support
10
11try:
12    import _testcapi
13except ImportError:
14    _testcapi = None
15
16
17# This tests to make sure that if a SIGINT arrives just before we send into a
18# yield from chain, the KeyboardInterrupt is raised in the innermost
19# generator (see bpo-30039).
@unittest.skipUnless(_testcapi is not None and
                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
                     "needs _testcapi.raise_SIGINT_then_send_None")
class SignalAndYieldFromTest(unittest.TestCase):
    """Regression test for bpo-30039: a SIGINT raised just before resuming
    a ``yield from`` chain must surface as KeyboardInterrupt inside the
    innermost generator, not somewhere up the delegation chain."""

    def generator1(self):
        # Outer generator: pure delegation, forwards generator2's result.
        return (yield from self.generator2())

    def generator2(self):
        # Innermost generator: records whether KeyboardInterrupt reached it.
        try:
            yield
        except KeyboardInterrupt:
            return "PASSED"
        else:
            return "FAILED"

    def test_raise_and_yield_from(self):
        gen = self.generator1()
        gen.send(None)
        try:
            # C helper: raises SIGINT and immediately resumes *gen*, so the
            # pending signal is handled during the send into the chain.
            _testcapi.raise_SIGINT_then_send_None(gen)
        except BaseException as _exc:
            # The chain is expected to finish; we should observe the
            # terminating StopIteration here, not KeyboardInterrupt.
            exc = _exc
        self.assertIs(type(exc), StopIteration)
        self.assertEqual(exc.value, "PASSED")
45
46
class FinalizationTest(unittest.TestCase):
    """Finalization behaviour of generator objects and their frames.

    NOTE(review): these tests are sensitive to exact reference counts and
    statement order (``sys.getrefcount``, ``weakref``, explicit ``del``);
    do not restructure them casually.
    """

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Runs during finalization: stores the otherwise-dying frame
                # into the enclosing scope, resurrecting it.
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object itself is gone...
        self.assertIs(wr(), None)
        # ...but its frame survived via the nonlocal reference.
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                # Sending the generator into itself makes its frame hold a
                # reference back to the generator object: a cycle.
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        g.send(g)
        # The cycle means more than the two baseline references exist.
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # Cycle collection ran the finally block...
        self.assertTrue(finalized)
        # ...and left nothing behind in gc.garbage.
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        # Each variant must yield 1 and then return the sent value via
        # StopIteration.value.
        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            self.assertEqual(cm.exception.value, 2)
109
110
class GeneratorTest(unittest.TestCase):
    """Generator-object attribute behaviour: naming, copying, pickling."""

    def test_name(self):
        """__name__/__qualname__ track the function and are writable strings."""
        def func():
            yield 1

        # A fresh generator inherits both names from its function.
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # Both attributes are writable on the generator itself.
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # Non-string assignment and deletion are both rejected.
        for attr in ('__name__', '__qualname__'):
            self.assertRaises(TypeError, setattr, gen, attr, 123)
            self.assertRaises(TypeError, delattr, gen, attr)

        # Renaming the function is reflected in generators created later.
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")

        # A generator expression gets the placeholder name "<genexpr>".
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__, "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        """Generators cannot be shallow-copied."""
        def f():
            yield 1
        with self.assertRaises(TypeError):
            copy.copy(f())

    def test_pickle(self):
        """Generators cannot be pickled under any protocol."""
        def f():
            yield 1
        gen = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(gen, proto)
163
164
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().

    def test_except_throw(self):
        """An exception thrown into a generator becomes its handled
        exception only while it runs, without leaking into the caller."""
        def store_raise_exc_generator():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        # Resuming once more re-raises the stored ValueError; it must not
        # have gained a spurious __context__ while suspended.
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        # The caller's exception state is clean afterwards.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        """The caller's currently handled exception is visible inside
        next(g)."""
        def gen():
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        """An exception raised inside the generator chains (__context__)
        to the exception the caller is handling."""
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        """gen.throw() while the caller handles an exception: the thrown
        exception chains to it, and a further raise chains again."""
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_stopiteration_error(self):
        # See also PEP 479: StopIteration raised in a generator body is
        # turned into RuntimeError.

        def gen():
            raise StopIteration
            yield

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(gen())

    def test_tutorial_stopiteration(self):
        # "Raise StopIteration" stops the generator too -- under PEP 479
        # it surfaces as RuntimeError:

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(g)

    def test_return_tuple(self):
        """A tuple return value must arrive intact as StopIteration.value
        (not be unpacked into StopIteration's arguments)."""
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        """Returning a StopIteration *instance* keeps it intact as the
        .value of the terminating StopIteration."""
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
317
318
class GeneratorThrowTest(unittest.TestCase):
    """Exception-context (__context__) handling for gen.throw()."""

    def test_exception_context_with_yield(self):
        # An exception thrown into a generator suspended inside an "except"
        # block must chain to the exception being handled there.
        def f():
            try:
                raise KeyError('a')
            except Exception:
                yield

        gen = f()
        gen.send(None)
        with self.assertRaises(ValueError) as cm:
            gen.throw(ValueError)
        context = cm.exception.__context__
        self.assertEqual((type(context), context.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_inside_generator(self):
        # Check that the context is also available from inside the generator
        # with yield, as opposed to outside.
        def f():
            try:
                raise KeyError('a')
            except Exception:
                try:
                    yield
                except Exception as exc:
                    self.assertEqual(type(exc), ValueError)
                    context = exc.__context__
                    self.assertEqual((type(context), context.args),
                        (KeyError, ('a',)))
                    yield 'b'

        gen = f()
        gen.send(None)
        actual = gen.throw(ValueError)
        # This ensures that the assertions inside were executed.
        self.assertEqual(actual, 'b')

    def test_exception_context_with_yield_from(self):
        # The context must be set even when the generator is suspended in a
        # subgenerator via "yield from".
        def f():
            yield

        def g():
            try:
                raise KeyError('a')
            except Exception:
                yield from f()

        gen = g()
        gen.send(None)
        with self.assertRaises(ValueError) as cm:
            gen.throw(ValueError)
        context = cm.exception.__context__
        self.assertEqual((type(context), context.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_from_with_context_cycle(self):
        # Check trying to create an exception context cycle:
        # https://bugs.python.org/issue40696
        has_cycle = None

        def f():
            yield

        def g(exc):
            nonlocal has_cycle
            try:
                raise exc
            except Exception:
                try:
                    yield from f()
                except Exception as caught:
                    # Use a distinct name here: "except ... as exc" would
                    # shadow -- and on block exit unbind -- the parameter.
                    has_cycle = (caught is caught.__context__)
            yield

        exc = KeyError('a')
        gen = g(exc)
        gen.send(None)
        gen.throw(exc)
        # This also distinguishes from the initial has_cycle=None.
        self.assertEqual(has_cycle, False)

    def test_throw_after_none_exc_type(self):
        # Once an earlier exception has been handled to completion, throwing
        # into the generator must not resurrect it as context.
        def g():
            try:
                raise KeyError
            except KeyError:
                pass

            try:
                yield
            except Exception:
                raise RuntimeError

        gen = g()
        gen.send(None)
        # The unused "as cm" capture was dropped -- only the raised type
        # matters here.
        with self.assertRaises(RuntimeError):
            gen.throw(ValueError)
416
417
class GeneratorStackTraceTest(unittest.TestCase):
    """Frame stack visible from inside generators driven via yield from."""

    def check_stack_names(self, frame, expected):
        # Walk from *frame* towards the outermost caller, collecting code
        # names until a test-helper frame ("check_*"/"call_*") is reached.
        names = []
        current = frame
        while current is not None:
            code_name = current.f_code.co_name
            if code_name.startswith(('check_', 'call_')):
                break
            names.append(code_name)
            current = current.f_back

        self.assertEqual(names, expected)

    def check_yield_from_example(self, call_method):
        def f():
            # While f runs, both f and the delegating g frames are live.
            self.check_stack_names(sys._getframe(), ['f', 'g'])
            try:
                yield
            except Exception:
                pass
            self.check_stack_names(sys._getframe(), ['f', 'g'])

        def g():
            self.check_stack_names(sys._getframe(), ['g'])
            yield from f()
            self.check_stack_names(sys._getframe(), ['g'])

        walker = g()
        walker.send(None)
        try:
            call_method(walker)
        except StopIteration:
            pass

    def test_send_with_yield_from(self):
        # Resume via send(); stack names must be correct on every check.
        def call_send(target):
            target.send(None)

        self.check_yield_from_example(call_send)

    def test_throw_with_yield_from(self):
        # Resume via throw(); f swallows the exception and both finish.
        def call_throw(target):
            target.throw(RuntimeError)

        self.check_yield_from_example(call_throw)
465
466
class YieldFromTests(unittest.TestCase):
    """Introspection of delegation via the gi_yieldfrom attribute."""

    def test_generator_gi_yieldfrom(self):
        def a():
            # A running generator reports no delegation target of its own;
            # the outer generator is RUNNING while its subgenerator runs.
            self.assertEqual(state(bgen), inspect.GEN_RUNNING)
            self.assertIsNone(bgen.gi_yieldfrom)
            yield
            self.assertEqual(state(bgen), inspect.GEN_RUNNING)
            self.assertIsNone(bgen.gi_yieldfrom)

        def b():
            self.assertIsNone(bgen.gi_yieldfrom)
            yield from a()
            self.assertIsNone(bgen.gi_yieldfrom)
            yield
            self.assertIsNone(bgen.gi_yieldfrom)

        state = inspect.getgeneratorstate
        bgen = b()
        # Before the first send: created, no delegate.
        self.assertEqual(state(bgen), inspect.GEN_CREATED)
        self.assertIsNone(bgen.gi_yieldfrom)

        # Suspended inside "yield from a()": the delegate is visible.
        bgen.send(None)
        self.assertEqual(state(bgen), inspect.GEN_SUSPENDED)
        self.assertEqual(bgen.gi_yieldfrom.gi_code.co_name, 'a')

        # Suspended at b's own "yield": no delegate any more.
        bgen.send(None)
        self.assertEqual(state(bgen), inspect.GEN_SUSPENDED)
        self.assertIsNone(bgen.gi_yieldfrom)

        # Unpacking into an empty target exhausts the generator while also
        # asserting that nothing more is yielded.
        [] = bgen
        self.assertEqual(state(bgen), inspect.GEN_CLOSED)
        self.assertIsNone(bgen.gi_yieldfrom)
498
499
# Doctest text: tutorial-style generator examples. Presumably collected and
# run via doctest by machinery later in this file (not visible in this
# chunk) -- confirm before relying on that.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...             print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
613
614# The examples from PEP 255.
615
# Doctest text: the examples from PEP 255. Presumably run via doctest by
# machinery later in this file (not visible in this chunk) -- confirm.
pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
      File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
778
779# Examples from Iterator-List and Python-Dev and c.l.py.
780
# Doctest text: examples from the Iterators list, python-dev and c.l.py.
# Presumably run via doctest by machinery later in this file (not visible
# in this chunk) -- confirm.
email_tests = """

The difference between yielding None and returning it.

>>> def g():
...     for i in range(3):
...         yield None
...     yield None
...     return
>>> list(g())
[None, None, None, None]

Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.

>>> def g():
...     yield 1
...     try:
...         raise StopIteration
...     except:
...         yield 2
...     yield 3
>>> list(g())
[1, 2, 3]

Next one was posted to c.l.py.

>>> def gcomb(x, k):
...     "Generate all combinations of k elements from list x."
...
...     if k > len(x):
...         return
...     if k == 0:
...         yield []
...     else:
...         first, rest = x[0], x[1:]
...         # A combination does or doesn't contain first.
...         # If it does, the remainder is a k-1 comb of rest.
...         for c in gcomb(rest, k-1):
...             c.insert(0, first)
...             yield c
...         # If it doesn't contain first, it's a k comb of rest.
...         for c in gcomb(rest, k):
...             yield c

>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
...     print("%d-combs of %s:" % (k, seq))
...     for c in gcomb(seq, k):
...         print("   ", c)
0-combs of [1, 2, 3, 4]:
    []
1-combs of [1, 2, 3, 4]:
    [1]
    [2]
    [3]
    [4]
2-combs of [1, 2, 3, 4]:
    [1, 2]
    [1, 3]
    [1, 4]
    [2, 3]
    [2, 4]
    [3, 4]
3-combs of [1, 2, 3, 4]:
    [1, 2, 3]
    [1, 2, 4]
    [1, 3, 4]
    [2, 3, 4]
4-combs of [1, 2, 3, 4]:
    [1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:

From the Iterators list, about the types of these things.

>>> def g():
...     yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True

And more, added later.

>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
  ...
AttributeError: readonly attribute
>>> def g():
...     yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0

A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators

>>> class disjointSet:
...     def __init__(self, name):
...         self.name = name
...         self.parent = None
...         self.generator = self.generate()
...
...     def generate(self):
...         while not self.parent:
...             yield self
...         for x in self.parent.generator:
...             yield x
...
...     def find(self):
...         return next(self.generator)
...
...     def union(self, parent):
...         if self.parent:
...             raise ValueError("Sorry, I'm not a root!")
...         self.parent = parent
...
...     def __str__(self):
...         return self.name

>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]

>>> import random
>>> gen = random.Random(42)
>>> while 1:
...     for s in sets:
...         print(" %s->%s" % (s, s.find()), end='')
...     print()
...     if len(roots) > 1:
...         s1 = gen.choice(roots)
...         roots.remove(s1)
...         s2 = gen.choice(roots)
...         s1.union(s2)
...         print("merged", s1, "into", s2)
...     else:
...         break
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H

"""
968# Emacs turd '
969
970# Fun tests (for sufficiently warped notions of "fun").
971
972fun_tests = """
973
974Build up to a recursive Sieve of Eratosthenes generator.
975
976>>> def firstn(g, n):
977...     return [next(g) for i in range(n)]
978
979>>> def intsfrom(i):
980...     while 1:
981...         yield i
982...         i += 1
983
984>>> firstn(intsfrom(5), 7)
985[5, 6, 7, 8, 9, 10, 11]
986
987>>> def exclude_multiples(n, ints):
988...     for i in ints:
989...         if i % n:
990...             yield i
991
992>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
993[1, 2, 4, 5, 7, 8]
994
995>>> def sieve(ints):
996...     prime = next(ints)
997...     yield prime
998...     not_divisible_by_prime = exclude_multiples(prime, ints)
999...     for p in sieve(not_divisible_by_prime):
1000...         yield p
1001
1002>>> primes = sieve(intsfrom(2))
1003>>> firstn(primes, 20)
1004[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
1005
1006
1007Another famous problem:  generate all integers of the form
1008    2**i * 3**j  * 5**k
1009in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
1010Try writing it without generators, and correctly, and without generating
10113 internal results for each result output.
1012
1013>>> def times(n, g):
1014...     for i in g:
1015...         yield n * i
1016>>> firstn(times(10, intsfrom(1)), 10)
1017[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
1018
1019>>> def merge(g, h):
1020...     ng = next(g)
1021...     nh = next(h)
1022...     while 1:
1023...         if ng < nh:
1024...             yield ng
1025...             ng = next(g)
1026...         elif ng > nh:
1027...             yield nh
1028...             nh = next(h)
1029...         else:
1030...             yield ng
1031...             ng = next(g)
1032...             nh = next(h)
1033
1034The following works, but is doing a whale of a lot of redundant work --
1035it's not clear how to get the internal uses of m235 to share a single
1036generator.  Note that me_times2 (etc) each need to see every element in the
1037result sequence.  So this is an example where lazy lists are more natural
1038(you can look at the head of a lazy list any number of times).
1039
1040>>> def m235():
1041...     yield 1
1042...     me_times2 = times(2, m235())
1043...     me_times3 = times(3, m235())
1044...     me_times5 = times(5, m235())
1045...     for i in merge(merge(me_times2,
1046...                          me_times3),
1047...                    me_times5):
1048...         yield i
1049
1050Don't print "too many" of these -- the implementation above is extremely
1051inefficient:  each call of m235() leads to 3 recursive calls, and in
1052turn each of those 3 more, and so on, and so on, until we've descended
1053enough levels to satisfy the print stmts.  Very odd:  when I printed 5
1054lines of results below, this managed to screw up Win98's malloc in "the
1055usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
1056address space, and it *looked* like a very slow leak.
1057
1058>>> result = m235()
1059>>> for i in range(3):
1060...     print(firstn(result, 15))
1061[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1062[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1063[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1064
1065Heh.  Here's one way to get a shared list, complete with an excruciating
1066namespace renaming trick.  The *pretty* part is that the times() and merge()
1067functions can be reused as-is, because they only assume their stream
1068arguments are iterable -- a LazyList is the same as a generator to times().
1069
1070>>> class LazyList:
1071...     def __init__(self, g):
1072...         self.sofar = []
1073...         self.fetch = g.__next__
1074...
1075...     def __getitem__(self, i):
1076...         sofar, fetch = self.sofar, self.fetch
1077...         while i >= len(sofar):
1078...             sofar.append(fetch())
1079...         return sofar[i]
1080
1081>>> def m235():
1082...     yield 1
1083...     # Gack:  m235 below actually refers to a LazyList.
1084...     me_times2 = times(2, m235)
1085...     me_times3 = times(3, m235)
1086...     me_times5 = times(5, m235)
1087...     for i in merge(merge(me_times2,
1088...                          me_times3),
1089...                    me_times5):
1090...         yield i
1091
1092Print as many of these as you like -- *this* implementation is memory-
1093efficient.
1094
1095>>> m235 = LazyList(m235())
1096>>> for i in range(5):
1097...     print([m235[j] for j in range(15*i, 15*(i+1))])
1098[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1099[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1100[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1101[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1102[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1103
1104Ye olde Fibonacci generator, LazyList style.
1105
1106>>> def fibgen(a, b):
1107...
1108...     def sum(g, h):
1109...         while 1:
1110...             yield next(g) + next(h)
1111...
1112...     def tail(g):
1113...         next(g)    # throw first away
1114...         for x in g:
1115...             yield x
1116...
1117...     yield a
1118...     yield b
1119...     for s in sum(iter(fib),
1120...                  tail(iter(fib))):
1121...         yield s
1122
1123>>> fib = LazyList(fibgen(1, 2))
1124>>> firstn(iter(fib), 17)
1125[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1126
1127
1128Running after your tail with itertools.tee (new in version 2.4)
1129
1130The algorithms "m235" (Hamming) and Fibonacci presented above are both
1131examples of a whole family of FP (functional programming) algorithms
1132where a function produces and returns a list while the production algorithm
presupposes that the list has already been produced by recursively calling itself.
1134For these algorithms to work, they must:
1135
1136- produce at least a first element without presupposing the existence of
1137  the rest of the list
1138- produce their elements in a lazy manner
1139
1140To work efficiently, the beginning of the list must not be recomputed over
1141and over again. This is ensured in most FP languages as a built-in feature.
1142In python, we have to explicitly maintain a list of already computed results
1143and abandon genuine recursivity.
1144
1145This is what had been attempted above with the LazyList class. One problem
1146with that class is that it keeps a list of all of the generated results and
1147therefore continually grows. This partially defeats the goal of the generator
1148concept, viz. produce the results only as needed instead of producing them
1149all and thereby wasting memory.
1150
1151Thanks to itertools.tee, it is now clear "how to get the internal uses of
1152m235 to share a single generator".
1153
1154>>> from itertools import tee
1155>>> def m235():
1156...     def _m235():
1157...         yield 1
1158...         for n in merge(times(2, m2),
1159...                        merge(times(3, m3),
1160...                              times(5, m5))):
1161...             yield n
1162...     m1 = _m235()
1163...     m2, m3, m5, mRes = tee(m1, 4)
1164...     return mRes
1165
1166>>> it = m235()
1167>>> for i in range(5):
1168...     print(firstn(it, 15))
1169[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1170[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1171[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1172[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1173[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1174
1175The "tee" function does just what we want. It internally keeps a generated
1176result for as long as it has not been "consumed" from all of the duplicated
1177iterators, whereupon it is deleted. You can therefore print the hamming
sequence for hours with little or no increase in memory usage.
1179
1180The beauty of it is that recursive running-after-their-tail FP algorithms
1181are quite straightforwardly expressed with this Python idiom.
1182
1183Ye olde Fibonacci generator, tee style.
1184
1185>>> def fib():
1186...
1187...     def _isum(g, h):
1188...         while 1:
1189...             yield next(g) + next(h)
1190...
1191...     def _fib():
1192...         yield 1
1193...         yield 2
1194...         next(fibTail) # throw first away
1195...         for res in _isum(fibHead, fibTail):
1196...             yield res
1197...
1198...     realfib = _fib()
1199...     fibHead, fibTail, fibRes = tee(realfib, 3)
1200...     return fibRes
1201
1202>>> firstn(fib(), 17)
1203[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1204
1205"""
1206
1207# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
1208# hackery.
1209
1210syntax_tests = """
1211
1212These are fine:
1213
1214>>> def f():
1215...     yield 1
1216...     return
1217
1218>>> def f():
1219...     try:
1220...         yield 1
1221...     finally:
1222...         pass
1223
1224>>> def f():
1225...     try:
1226...         try:
1227...             1//0
1228...         except ZeroDivisionError:
1229...             yield 666
1230...         except:
1231...             pass
1232...     finally:
1233...         pass
1234
1235>>> def f():
1236...     try:
1237...         try:
1238...             yield 12
1239...             1//0
1240...         except ZeroDivisionError:
1241...             yield 666
1242...         except:
1243...             try:
1244...                 x = 12
1245...             finally:
1246...                 yield 12
1247...     except:
1248...         return
1249>>> list(f())
1250[12, 666]
1251
1252>>> def f():
1253...    yield
1254>>> type(f())
1255<class 'generator'>
1256
1257
1258>>> def f():
1259...    if 0:
1260...        yield
1261>>> type(f())
1262<class 'generator'>
1263
1264
1265>>> def f():
1266...     if 0:
1267...         yield 1
1268>>> type(f())
1269<class 'generator'>
1270
1271>>> def f():
1272...    if "":
1273...        yield None
1274>>> type(f())
1275<class 'generator'>
1276
1277>>> def f():
1278...     return
1279...     try:
1280...         if x==4:
1281...             pass
1282...         elif 0:
1283...             try:
1284...                 1//0
1285...             except SyntaxError:
1286...                 pass
1287...             else:
1288...                 if 0:
1289...                     while 12:
1290...                         x += 1
1291...                         yield 2 # don't blink
1292...                         f(a, b, c, d, e)
1293...         else:
1294...             pass
1295...     except:
1296...         x = 1
1297...     return
1298>>> type(f())
1299<class 'generator'>
1300
1301>>> def f():
1302...     if 0:
1303...         def g():
1304...             yield 1
1305...
1306>>> type(f())
1307<class 'NoneType'>
1308
1309>>> def f():
1310...     if 0:
1311...         class C:
1312...             def __init__(self):
1313...                 yield 1
1314...             def f(self):
1315...                 yield 2
1316>>> type(f())
1317<class 'NoneType'>
1318
1319>>> def f():
1320...     if 0:
1321...         return
1322...     if 0:
1323...         yield 2
1324>>> type(f())
1325<class 'generator'>
1326
1327This one caused a crash (see SF bug 567538):
1328
1329>>> def f():
1330...     for i in range(3):
1331...         try:
1332...             continue
1333...         finally:
1334...             yield i
1335...
1336>>> g = f()
1337>>> print(next(g))
13380
1339>>> print(next(g))
13401
1341>>> print(next(g))
13422
1343>>> print(next(g))
1344Traceback (most recent call last):
1345StopIteration
1346
1347
1348Test the gi_code attribute
1349
1350>>> def f():
1351...     yield 5
1352...
1353>>> g = f()
1354>>> g.gi_code is f.__code__
1355True
1356>>> next(g)
13575
1358>>> next(g)
1359Traceback (most recent call last):
1360StopIteration
1361>>> g.gi_code is f.__code__
1362True
1363
1364
1365Test the __name__ attribute and the repr()
1366
1367>>> def f():
1368...    yield 5
1369...
1370>>> g = f()
1371>>> g.__name__
1372'f'
1373>>> repr(g)  # doctest: +ELLIPSIS
1374'<generator object f at ...>'
1375
1376Lambdas shouldn't have their usual return behavior.
1377
1378>>> x = lambda: (yield 1)
1379>>> list(x())
1380[1]
1381
1382>>> x = lambda: ((yield 1), (yield 2))
1383>>> list(x())
1384[1, 2]
1385"""
1386
1387# conjoin is a simple backtracking generator, named in honor of Icon's
1388# "conjunction" control structure.  Pass a list of no-argument functions
1389# that return iterable objects.  Easiest to explain by example:  assume the
1390# function list [x, y, z] is passed.  Then conjoin acts like:
1391#
1392# def g():
1393#     values = [None] * 3
1394#     for values[0] in x():
1395#         for values[1] in y():
1396#             for values[2] in z():
1397#                 yield values
1398#
1399# So some 3-lists of values *may* be generated, each time we successfully
1400# get into the innermost loop.  If an iterator fails (is exhausted) before
1401# then, it "backtracks" to get the next value from the nearest enclosing
1402# iterator (the one "to the left"), and starts all over again at the next
1403# slot (pumps a fresh iterator).  Of course this is most useful when the
1404# iterators have side-effects, so that which values *can* be generated at
1405# each slot depend on the values iterated at previous slots.
1406
def simple_conjoin(gs):
    """Backtracking cross-product generator, straightforward version.

    `gs` is a list of no-argument callables, each returning an iterable.
    One shared list is yielded per combination, slot k holding the current
    value from gs[k]().  Note that it is the SAME list object every time,
    so callers must copy it to keep a snapshot.
    """
    slots = [None] * len(gs)

    def walk(depth):
        # Past the last slot: a complete combination is ready.
        if depth >= len(gs):
            yield slots
            return
        # Pump a fresh iterator for this slot, recurse for the rest.
        for slots[depth] in gs[depth]():
            for out in walk(depth + 1):
                yield out

    for out in walk(0):
        yield out
1421
1422# That works fine, but recursing a level and checking i against len(gs) for
1423# each item produced is inefficient.  By doing manual loop unrolling across
1424# generator boundaries, it's possible to eliminate most of that overhead.
1425# This isn't worth the bother *in general* for generators, but conjoin() is
1426# a core building block for some CPU-intensive generator applications.
1427
def conjoin(gs):
    """Backtracking cross-product generator, loop-unrolled for speed.

    Same contract as simple_conjoin():  `gs` is a list of no-argument
    callables returning iterables, and the single shared `values` list is
    yielded once per combination (copy it to keep a snapshot).  Recursion
    is unrolled three loop nests at a time to cut per-item generator
    overhead.
    """
    n = len(gs)
    values = [None] * n

    # Peel off slots one at a time until the number remaining is a
    # multiple of 3, then hand off to the unrolled helper.
    def _peel(i):
        if i >= n:
            yield values
        elif (n - i) % 3 != 0:
            nxt = i + 1
            for values[i] in gs[i]():
                for out in _peel(nxt):
                    yield out
        else:
            for out in _unrolled3(i):
                yield out

    # Three loop nests at a time, recursing only if at least three more
    # remain.  Internal helper for _peel -- never call directly.
    def _unrolled3(i):
        assert i < n and (n - i) % 3 == 0
        j1, j2, j3 = i + 1, i + 2, i + 3
        ga, gb, gc = gs[i:j3]

        if j3 >= n:
            # These are the last three, so yield the shared list directly.
            for values[i] in ga():
                for values[j1] in gb():
                    for values[j2] in gc():
                        yield values
        else:
            # At least six nests remain; peel off three and recurse.
            for values[i] in ga():
                for values[j1] in gb():
                    for values[j2] in gc():
                        for out in _unrolled3(j3):
                            yield out

    for out in _peel(0):
        yield out
1477
1478# And one more approach:  For backtracking apps like the Knight's Tour
1479# solver below, the number of backtracking levels can be enormous (one
1480# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1481# needs 10,000 levels).  In such cases Python is likely to run out of
1482# stack space due to recursion.  So here's a recursion-free version of
1483# conjoin too.
1484# NOTE WELL:  This allows large problems to be solved with only trivial
1485# demands on stack space.  Without explicitly resumable generators, this is
1486# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1487# than the fancy unrolled recursive conjoin.
1488
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Non-recursive conjoin:  same contract as conjoin(), but iterative.

    Keeps an explicit array of iterators (one per slot) instead of one
    generator frame per backtracking level, so arbitrarily many levels
    cost no Python stack space.  As with conjoin(), the same `values`
    list object is yielded for every combination.
    """
    n = len(gs)
    values = [None] * n
    iters  = [None] * n  # iters[k] is the bound __next__ of slot k's iterator
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend:  pump a fresh iterator for each slot from i onward.
        # A StopIteration here means some slot came up empty, so there is
        # no complete combination to yield on this pass.
        try:
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # Even the outermost iterator is exhausted -- all done.
            assert i < 0
            break
1522
1523# A conjoin-based N-Queens solver.
1524
class Queens:
    """N-Queens solver built on conjoin():  one row generator per board row."""

    def __init__(self, n):
        self.n = n
        cols = range(n)

        # Give every column and every diagonal through a square its own
        # bit position in a single mask:
        #   columns:  n of those, range(n).
        #   NW-SE diagonals:  2n-1 of these, i-j unique and invariant
        #     along each; smallest i-j is 0-(n-1) = 1-n, so add n-1 to
        #     shift to 0-based.
        #   NE-SW diagonals:  2n-1 of these, i+j unique and invariant
        #     along each; smallest i+j is 0, largest is 2n-2.
        # A row generator yields each column j that is safe for its row,
        # setting the square's bits in self.used while the choice is live
        # and clearing them again on backtrack.
        self.rowgenerators = []
        for i in cols:
            masks = [(1 << j) |                  # column ordinal
                     (1 << (n + i-j + n-1)) |    # NW-SE ordinal
                     (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
                          for j in cols]

            def rowgen(masks=masks):  # default arg binds this row's masks now
                for j in cols:
                    bits = masks[j]
                    if bits & self.used == 0:
                        self.used |= bits
                        yield j
                        self.used &= ~bits

            self.rowgenerators.append(rowgen)

    def solve(self):
        """Generate solutions; each is a shared row -> column list."""
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col

    def printsolution(self, row2col):
        """Pretty-print one solution as an ASCII chessboard."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            row = [" "] * n
            row[row2col[i]] = "Q"
            print("|" + "|".join(row) + "|")
            print(sep)
1574
1575# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1576# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1577# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1578# creating 10s of thousands of generators then!), and is lengthy.
1579
class Knights:
    """Knight's Tour solver (closed tour / cycle) built on conjoin().

    Builds one no-argument generator per move; conjoin() (or
    flat_conjoin()) then backtracks over them.  `hard` selects the more
    expensive successor-ordering heuristic in advance_hard().
    """

    def __init__(self, m, n, hard=0):
        # Board is m rows by n columns; squares are numbered row-major.
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 through m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # (len=len makes the builtin a fast local in this hot loop.)
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 through m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            # `hard and advance_hard or advance` is the pre-ternary idiom
            # for: advance_hard if hard else advance.
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map (row i, column j) to a row-major square number."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Inverse of coords2index():  square number -> (row, column)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs:  succs[i] lists the knight moves from square i."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        # The eight knight-move offsets, clockwise from (1, 2).
        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Generate closed tours; each is a shared list of square numbers."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Pretty-print one tour as an ASCII board of move numbers."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        # NOTE: `format` shadows the builtin, but only locally.
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
            print(sep)
1770
1771conjoin_tests = """
1772
1773Generate the 3-bit binary numbers in order.  This illustrates dumbest-
1774possible use of conjoin, just to generate the full cross-product.
1775
1776>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1777...     print(c)
1778[0, 0, 0]
1779[0, 0, 1]
1780[0, 1, 0]
1781[0, 1, 1]
1782[1, 0, 0]
1783[1, 0, 1]
1784[1, 1, 0]
1785[1, 1, 1]
1786
1787For efficiency in typical backtracking apps, conjoin() yields the same list
1788object each time.  So if you want to save away a full account of its
1789generated sequence, you need to copy its results.
1790
1791>>> def gencopy(iterator):
1792...     for x in iterator:
1793...         yield x[:]
1794
1795>>> for n in range(10):
1796...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1797...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
17980 1 True True
17991 2 True True
18002 4 True True
18013 8 True True
18024 16 True True
18035 32 True True
18046 64 True True
18057 128 True True
18068 256 True True
18079 512 True True
1808
1809And run an 8-queens solver.
1810
1811>>> q = Queens(8)
1812>>> LIMIT = 2
1813>>> count = 0
1814>>> for row2col in q.solve():
1815...     count += 1
1816...     if count <= LIMIT:
1817...         print("Solution", count)
1818...         q.printsolution(row2col)
1819Solution 1
1820+-+-+-+-+-+-+-+-+
1821|Q| | | | | | | |
1822+-+-+-+-+-+-+-+-+
1823| | | | |Q| | | |
1824+-+-+-+-+-+-+-+-+
1825| | | | | | | |Q|
1826+-+-+-+-+-+-+-+-+
1827| | | | | |Q| | |
1828+-+-+-+-+-+-+-+-+
1829| | |Q| | | | | |
1830+-+-+-+-+-+-+-+-+
1831| | | | | | |Q| |
1832+-+-+-+-+-+-+-+-+
1833| |Q| | | | | | |
1834+-+-+-+-+-+-+-+-+
1835| | | |Q| | | | |
1836+-+-+-+-+-+-+-+-+
1837Solution 2
1838+-+-+-+-+-+-+-+-+
1839|Q| | | | | | | |
1840+-+-+-+-+-+-+-+-+
1841| | | | | |Q| | |
1842+-+-+-+-+-+-+-+-+
1843| | | | | | | |Q|
1844+-+-+-+-+-+-+-+-+
1845| | |Q| | | | | |
1846+-+-+-+-+-+-+-+-+
1847| | | | | | |Q| |
1848+-+-+-+-+-+-+-+-+
1849| | | |Q| | | | |
1850+-+-+-+-+-+-+-+-+
1851| |Q| | | | | | |
1852+-+-+-+-+-+-+-+-+
1853| | | | |Q| | | |
1854+-+-+-+-+-+-+-+-+
1855
1856>>> print(count, "solutions in all.")
185792 solutions in all.
1858
1859And run a Knight's Tour on a 10x10 board.  Note that there are about
186020,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1861
1862>>> k = Knights(10, 10)
1863>>> LIMIT = 2
1864>>> count = 0
1865>>> for x in k.solve():
1866...     count += 1
1867...     if count <= LIMIT:
1868...         print("Solution", count)
1869...         k.printsolution(x)
1870...     else:
1871...         break
1872Solution 1
1873+---+---+---+---+---+---+---+---+---+---+
1874|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1875+---+---+---+---+---+---+---+---+---+---+
1876| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1877+---+---+---+---+---+---+---+---+---+---+
1878| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1879+---+---+---+---+---+---+---+---+---+---+
1880| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1881+---+---+---+---+---+---+---+---+---+---+
1882| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1883+---+---+---+---+---+---+---+---+---+---+
1884| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1885+---+---+---+---+---+---+---+---+---+---+
1886| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1887+---+---+---+---+---+---+---+---+---+---+
1888| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1889+---+---+---+---+---+---+---+---+---+---+
1890| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1891+---+---+---+---+---+---+---+---+---+---+
1892| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1893+---+---+---+---+---+---+---+---+---+---+
1894Solution 2
1895+---+---+---+---+---+---+---+---+---+---+
1896|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1897+---+---+---+---+---+---+---+---+---+---+
1898| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1899+---+---+---+---+---+---+---+---+---+---+
1900| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1901+---+---+---+---+---+---+---+---+---+---+
1902| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1903+---+---+---+---+---+---+---+---+---+---+
1904| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1905+---+---+---+---+---+---+---+---+---+---+
1906| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1907+---+---+---+---+---+---+---+---+---+---+
1908| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
1909+---+---+---+---+---+---+---+---+---+---+
1910| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
1911+---+---+---+---+---+---+---+---+---+---+
1912| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
1913+---+---+---+---+---+---+---+---+---+---+
1914| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
1915+---+---+---+---+---+---+---+---+---+---+
1916"""
1917
1918weakref_tests = """\
1919Generators are weakly referencable:
1920
1921>>> import weakref
1922>>> def gen():
1923...     yield 'foo!'
1924...
1925>>> wr = weakref.ref(gen)
1926>>> wr() is gen
1927True
1928>>> p = weakref.proxy(gen)
1929
1930Generator-iterators are weakly referencable as well:
1931
1932>>> gi = gen()
1933>>> wr = weakref.ref(gi)
1934>>> wr() is gi
1935True
1936>>> p = weakref.proxy(gi)
1937>>> list(p)
1938['foo!']
1939
1940"""
1941
1942coroutine_tests = """\
1943Sending a value into a started generator:
1944
1945>>> def f():
1946...     print((yield 1))
1947...     yield 2
1948>>> g = f()
1949>>> next(g)
19501
1951>>> g.send(42)
195242
19532
1954
1955Sending a value into a new generator produces a TypeError:
1956
1957>>> f().send("foo")
1958Traceback (most recent call last):
1959...
1960TypeError: can't send non-None value to a just-started generator
1961
1962
1963Yield by itself yields None:
1964
1965>>> def f(): yield
1966>>> list(f())
1967[None]
1968
1969
1970Yield is allowed only in the outermost iterable in generator expression:
1971
1972>>> def f(): list(i for i in [(yield 26)])
1973>>> type(f())
1974<class 'generator'>
1975
1976
1977A yield expression with augmented assignment.
1978
1979>>> def coroutine(seq):
1980...     count = 0
1981...     while count < 200:
1982...         count += yield
1983...         seq.append(count)
1984>>> seq = []
1985>>> c = coroutine(seq)
1986>>> next(c)
1987>>> print(seq)
1988[]
1989>>> c.send(10)
1990>>> print(seq)
1991[10]
1992>>> c.send(10)
1993>>> print(seq)
1994[10, 20]
1995>>> c.send(10)
1996>>> print(seq)
1997[10, 20, 30]
1998
1999
2000Check some syntax errors for yield expressions:
2001
2002>>> f=lambda: (yield 1),(yield 2)
2003Traceback (most recent call last):
2004  ...
2005SyntaxError: 'yield' outside function
2006
2007# Pegen does not produce this error message yet
2008# >>> def f(): x = yield = y
2009# Traceback (most recent call last):
2010#   ...
2011# SyntaxError: assignment to yield expression not possible
2012
2013>>> def f(): (yield bar) = y
2014Traceback (most recent call last):
2015  ...
2016SyntaxError: cannot assign to yield expression
2017
2018>>> def f(): (yield bar) += y
2019Traceback (most recent call last):
2020  ...
2021SyntaxError: 'yield expression' is an illegal expression for augmented assignment
2022
2023
2024Now check some throw() conditions:
2025
2026>>> def f():
2027...     while True:
2028...         try:
2029...             print((yield))
2030...         except ValueError as v:
2031...             print("caught ValueError (%s)" % (v))
2032>>> import sys
2033>>> g = f()
2034>>> next(g)
2035
2036>>> g.throw(ValueError) # type only
2037caught ValueError ()
2038
2039>>> g.throw(ValueError("xyz"))  # value only
2040caught ValueError (xyz)
2041
2042>>> g.throw(ValueError, ValueError(1))   # value+matching type
2043caught ValueError (1)
2044
2045>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
2046caught ValueError (1)
2047
2048>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
2049caught ValueError (1)
2050
2051>>> g.throw(ValueError(1), "foo")       # bad args
2052Traceback (most recent call last):
2053  ...
2054TypeError: instance exception may not have a separate value
2055
2056>>> g.throw(ValueError, "foo", 23)      # bad args
2057Traceback (most recent call last):
2058  ...
2059TypeError: throw() third argument must be a traceback object
2060
2061>>> g.throw("abc")
2062Traceback (most recent call last):
2063  ...
2064TypeError: exceptions must be classes or instances deriving from BaseException, not str
2065
2066>>> g.throw(0)
2067Traceback (most recent call last):
2068  ...
2069TypeError: exceptions must be classes or instances deriving from BaseException, not int
2070
2071>>> g.throw(list)
2072Traceback (most recent call last):
2073  ...
2074TypeError: exceptions must be classes or instances deriving from BaseException, not type
2075
2076>>> def throw(g,exc):
2077...     try:
2078...         raise exc
2079...     except:
2080...         g.throw(*sys.exc_info())
2081>>> throw(g,ValueError) # do it with traceback included
2082caught ValueError ()
2083
2084>>> g.send(1)
20851
2086
2087>>> throw(g,TypeError)  # terminate the generator
2088Traceback (most recent call last):
2089  ...
2090TypeError
2091
2092>>> print(g.gi_frame)
2093None
2094
2095>>> g.send(2)
2096Traceback (most recent call last):
2097  ...
2098StopIteration
2099
2100>>> g.throw(ValueError,6)       # throw on closed generator
2101Traceback (most recent call last):
2102  ...
2103ValueError: 6
2104
2105>>> f().throw(ValueError,7)     # throw on just-opened generator
2106Traceback (most recent call last):
2107  ...
2108ValueError: 7
2109
2110Plain "raise" inside a generator should preserve the traceback (#13188).
2111The traceback should have 3 levels:
2112- g.throw()
2113- f()
2114- 1/0
2115
2116>>> def f():
2117...     try:
2118...         yield
2119...     except:
2120...         raise
2121>>> g = f()
2122>>> try:
2123...     1/0
2124... except ZeroDivisionError as v:
2125...     try:
2126...         g.throw(v)
2127...     except Exception as w:
2128...         tb = w.__traceback__
2129>>> levels = 0
2130>>> while tb:
2131...     levels += 1
2132...     tb = tb.tb_next
2133>>> levels
21343
2135
2136Now let's try closing a generator:
2137
2138>>> def f():
2139...     try: yield
2140...     except GeneratorExit:
2141...         print("exiting")
2142
2143>>> g = f()
2144>>> next(g)
2145>>> g.close()
2146exiting
2147>>> g.close()  # should be no-op now
2148
2149>>> f().close()  # close on just-opened generator should be fine
2150
2151>>> def f(): yield      # an even simpler generator
2152>>> f().close()         # close before opening
2153>>> g = f()
2154>>> next(g)
2155>>> g.close()           # close normally
2156
2157And finalization:
2158
2159>>> def f():
2160...     try: yield
2161...     finally:
2162...         print("exiting")
2163
2164>>> g = f()
2165>>> next(g)
2166>>> del g
2167exiting
2168
2169
2170GeneratorExit is not caught by except Exception:
2171
2172>>> def f():
2173...     try: yield
2174...     except Exception:
2175...         print('except')
2176...     finally:
2177...         print('finally')
2178
2179>>> g = f()
2180>>> next(g)
2181>>> del g
2182finally
2183
2184
2185Now let's try some ill-behaved generators:
2186
2187>>> def f():
2188...     try: yield
2189...     except GeneratorExit:
2190...         yield "foo!"
2191>>> g = f()
2192>>> next(g)
2193>>> g.close()
2194Traceback (most recent call last):
2195  ...
2196RuntimeError: generator ignored GeneratorExit
2197>>> g.close()
2198
2199
2200Our ill-behaved code should be invoked during GC:
2201
2202>>> with support.catch_unraisable_exception() as cm:
2203...     g = f()
2204...     next(g)
2205...     del g
2206...
2207...     cm.unraisable.exc_type == RuntimeError
2208...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
2209...     cm.unraisable.exc_traceback is not None
2210True
2211True
2212True
2213
2214And errors thrown during closing should propagate:
2215
2216>>> def f():
2217...     try: yield
2218...     except GeneratorExit:
2219...         raise TypeError("fie!")
2220>>> g = f()
2221>>> next(g)
2222>>> g.close()
2223Traceback (most recent call last):
2224  ...
2225TypeError: fie!
2226
2227
2228Ensure that various yield expression constructs make their
2229enclosing function a generator:
2230
2231>>> def f(): x += yield
2232>>> type(f())
2233<class 'generator'>
2234
2235>>> def f(): x = yield
2236>>> type(f())
2237<class 'generator'>
2238
2239>>> def f(): lambda x=(yield): 1
2240>>> type(f())
2241<class 'generator'>
2242
2243>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
2244>>> data = [1,2]
2245>>> g = f(data)
2246>>> type(g)
2247<class 'generator'>
2248>>> g.send(None)
2249'a'
2250>>> data
2251[1, 2]
2252>>> g.send(0)
2253'b'
2254>>> data
2255[27, 2]
2256>>> try: g.send(1)
2257... except StopIteration: pass
2258>>> data
2259[27, 27]
2260
2261"""
2262
# Doctest suite of regression tests for reference leaks involving generators
# and generator finalization.  The examples assert nothing directly; they are
# run under regrtest's refleak mode (-R), which fails if references leak.
# Registered under the "refleaks" key of the module-level __test__ dict.
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> from test import support
>>> class Leaker:
...     def __del__(self):
...         def invoke(message):
...             raise RuntimeError(message)
...         invoke("del failed")
...
>>> with support.catch_unraisable_exception() as cm:
...     l = Leaker()
...     del l
...
...     cm.unraisable.object == Leaker.__del__
...     cm.unraisable.exc_type == RuntimeError
...     str(cm.unraisable.exc_value) == "del failed"
...     cm.unraisable.exc_traceback is not None
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""
2334
# doctest scans this module-level mapping for extra test suites to run;
# insertion order is preserved, so the suites execute in this sequence.
__test__ = {
    "tut": tutorial_tests,
    "pep": pep_tests,
    "email": email_tests,
    "fun": fun_tests,
    "syntax": syntax_tests,
    "conjoin": conjoin_tests,
    "weakref": weakref_tests,
    "coroutine": coroutine_tests,
    "refleaks": refleaks_tests,
}
2345
def test_main(verbose=None):
    """Run this module's unittest cases, then its doctest suites.

    Magic test name that regrtest.py invokes *after* importing this
    module, which works around a bootstrap problem.  Note that doctest
    and regrtest both look in sys.argv for a "-v" argument, so verbose
    output works as expected in both ways of running regrtest.
    """
    from test import support
    from test import test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)
2354
# Allow running this file directly; regrtest itself goes through test_main().
if __name__ == "__main__":
    test_main(verbose=1)
2358