# Tests for generators: unittest cases plus doctest suites (tutorial, PEP 255,
# mailing-list examples).
1import copy
2import gc
3import pickle
4import sys
5import unittest
6import weakref
7import inspect
8
9from test import support
10
11try:
12    import _testcapi
13except ImportError:
14    _testcapi = None
15
16
# This tests to make sure that if a SIGINT arrives just before we send into a
# yield from chain, the KeyboardInterrupt is raised in the innermost
# generator (see bpo-30039).
@unittest.skipUnless(_testcapi is not None and
                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
                     "needs _testcapi.raise_SIGINT_then_send_None")
class SignalAndYieldFromTest(unittest.TestCase):
    """SIGINT delivered while sending into a ``yield from`` chain must be
    raised in the innermost generator (bpo-30039)."""

    def generator1(self):
        # Outer generator: delegates everything to generator2 and
        # propagates its return value.
        return (yield from self.generator2())

    def generator2(self):
        # Innermost generator: the KeyboardInterrupt from the pending
        # SIGINT must surface here, not in the delegating frame.
        try:
            yield
        except KeyboardInterrupt:
            return "PASSED"
        else:
            return "FAILED"

    def test_raise_and_yield_from(self):
        gen = self.generator1()
        gen.send(None)
        try:
            _testcapi.raise_SIGINT_then_send_None(gen)
        except BaseException as _exc:
            exc = _exc
        # generator2 caught the KeyboardInterrupt and returned "PASSED",
        # which reaches us as StopIteration.value.
        self.assertIs(type(exc), StopIteration)
        self.assertEqual(exc.value, "PASSED")
45
46
class FinalizationTest(unittest.TestCase):
    """Finalization of generators: frame resurrection, reference cycles,
    and lambda-created generators."""

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object is gone, but its finalizer stashed the frame.
        self.assertIs(wr(), None)
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        # Sending the generator into itself creates a cycle through its frame.
        g.send(g)
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        self.assertTrue(finalized)
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            self.assertEqual(cm.exception.value, 2)
109
110
class GeneratorTest(unittest.TestCase):
    """Basic generator-object behavior: names, copying, pickling, and the
    first send()."""

    def test_name(self):
        def func():
            yield 1

        # check generator names
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # modify generator names
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # generator names must be a string and cannot be deleted
        self.assertRaises(TypeError, setattr, gen, '__name__', 123)
        self.assertRaises(TypeError, setattr, gen, '__qualname__', 123)
        self.assertRaises(TypeError, delattr, gen, '__name__')
        self.assertRaises(TypeError, delattr, gen, '__qualname__')

        # modify names of the function creating the generator
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")

        # unnamed generator
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__,
                         "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        # Generators cannot be shallow-copied.
        def f():
            yield 1
        g = f()
        with self.assertRaises(TypeError):
            copy.copy(g)

    def test_pickle(self):
        # Generators cannot be pickled under any protocol.
        def f():
            yield 1
        g = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(g, proto)

    def test_send_non_none_to_new_gen(self):
        # The first send() to a fresh generator must be None.
        def f():
            yield 1
        g = f()
        with self.assertRaises(TypeError):
            g.send(0)
        self.assertEqual(next(g), 1)
171
172
class ExceptionTest(unittest.TestCase):
    """Tests for the issue #23353: check that the currently handled exception
    is correctly saved/restored in PyEval_EvalFrameEx().
    """

    def test_except_throw(self):
        def store_raise_exc_generator():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        # The caller's handled exception is visible inside the generator.
        def gen():
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_bad_exception(self):
        # E() returns the class itself, not an exception instance, so
        # gen.throw(E) must reject it with TypeError.
        class E(Exception):
            def __new__(cls, *args, **kwargs):
                return cls

        def boring_generator():
            yield

        gen = boring_generator()

        err_msg = 'should have returned an instance of BaseException'

        with self.assertRaisesRegex(TypeError, err_msg):
            gen.throw(E)

        self.assertRaises(StopIteration, next, gen)

        def generator():
            with self.assertRaisesRegex(TypeError, err_msg):
                yield

        gen = generator()
        next(gen)
        with self.assertRaises(StopIteration):
            gen.throw(E)

    def test_stopiteration_error(self):
        # See also PEP 479.

        def gen():
            raise StopIteration
            yield

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(gen())

    def test_tutorial_stopiteration(self):
        # Raise StopIteration" stops the generator too:

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(g)

    def test_return_tuple(self):
        # A returned tuple must not be mistaken for throw() arguments.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        # Returning a StopIteration instance wraps it; it must not replace
        # the StopIteration that signals the generator's exit.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
351
352
class GeneratorThrowTest(unittest.TestCase):
    """Exception-context (``__context__``) behavior when throwing into a
    generator suspended inside an ``except`` block."""

    def test_exception_context_with_yield(self):
        def f():
            try:
                raise KeyError('a')
            except Exception:
                yield

        gen = f()
        gen.send(None)
        with self.assertRaises(ValueError) as cm:
            gen.throw(ValueError)
        # The KeyError being handled at the yield becomes the context.
        context = cm.exception.__context__
        self.assertEqual((type(context), context.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_inside_generator(self):
        # Check that the context is also available from inside the generator
        # with yield, as opposed to outside.
        def f():
            try:
                raise KeyError('a')
            except Exception:
                try:
                    yield
                except Exception as exc:
                    self.assertEqual(type(exc), ValueError)
                    context = exc.__context__
                    self.assertEqual((type(context), context.args),
                        (KeyError, ('a',)))
                    yield 'b'

        gen = f()
        gen.send(None)
        actual = gen.throw(ValueError)
        # This ensures that the assertions inside were executed.
        self.assertEqual(actual, 'b')

    def test_exception_context_with_yield_from(self):
        def f():
            yield

        def g():
            try:
                raise KeyError('a')
            except Exception:
                yield from f()

        gen = g()
        gen.send(None)
        with self.assertRaises(ValueError) as cm:
            gen.throw(ValueError)
        context = cm.exception.__context__
        self.assertEqual((type(context), context.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_from_with_context_cycle(self):
        # Check trying to create an exception context cycle:
        # https://bugs.python.org/issue40696
        has_cycle = None

        def f():
            yield

        def g(exc):
            nonlocal has_cycle
            try:
                raise exc
            except Exception:
                try:
                    yield from f()
                except Exception as exc:
                    has_cycle = (exc is exc.__context__)
            yield

        exc = KeyError('a')
        gen = g(exc)
        gen.send(None)
        gen.throw(exc)
        # This also distinguishes from the initial has_cycle=None.
        self.assertEqual(has_cycle, False)

    def test_throw_after_none_exc_type(self):
        def g():
            try:
                raise KeyError
            except KeyError:
                pass

            try:
                yield
            except Exception:
                raise RuntimeError

        gen = g()
        gen.send(None)
        # No exception is being handled at the yield, so the thrown
        # ValueError arrives bare and is converted to RuntimeError.
        with self.assertRaises(RuntimeError):
            gen.throw(ValueError)
450
451
class GeneratorStackTraceTest(unittest.TestCase):
    """Check that the frame stack is linked correctly while sending or
    throwing through a ``yield from`` chain."""

    def check_stack_names(self, frame, expected):
        # Collect co_name up (via f_back) until we hit a test-helper frame.
        names = []
        while frame:
            name = frame.f_code.co_name
            # Stop checking frames when we get to our test helper.
            if name.startswith('check_') or name.startswith('call_'):
                break

            names.append(name)
            frame = frame.f_back

        self.assertEqual(names, expected)

    def check_yield_from_example(self, call_method):
        def f():
            self.check_stack_names(sys._getframe(), ['f', 'g'])
            try:
                yield
            except Exception:
                pass
            self.check_stack_names(sys._getframe(), ['f', 'g'])

        def g():
            self.check_stack_names(sys._getframe(), ['g'])
            yield from f()
            self.check_stack_names(sys._getframe(), ['g'])

        gen = g()
        gen.send(None)
        try:
            call_method(gen)
        except StopIteration:
            pass

    def test_send_with_yield_from(self):
        def call_send(gen):
            gen.send(None)

        self.check_yield_from_example(call_send)

    def test_throw_with_yield_from(self):
        def call_throw(gen):
            gen.throw(RuntimeError)

        self.check_yield_from_example(call_throw)
499
500
class YieldFromTests(unittest.TestCase):
    """Lifecycle of ``gi_yieldfrom``: set only while suspended in a
    ``yield from``, and None while running, suspended at a plain yield,
    created, or closed."""

    def test_generator_gi_yieldfrom(self):
        def a():
            # While a() runs, b() is GEN_RUNNING and not delegating.
            self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield
            self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
            self.assertIsNone(gen_b.gi_yieldfrom)

        def b():
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield from a()
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield
            self.assertIsNone(gen_b.gi_yieldfrom)

        gen_b = b()
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CREATED)
        self.assertIsNone(gen_b.gi_yieldfrom)

        gen_b.send(None)
        # Suspended inside "yield from a()": gi_yieldfrom is the subgenerator.
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
        self.assertEqual(gen_b.gi_yieldfrom.gi_code.co_name, 'a')

        gen_b.send(None)
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
        self.assertIsNone(gen_b.gi_yieldfrom)

        [] = gen_b  # Exhaust generator
        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CLOSED)
        self.assertIsNone(gen_b.gi_yieldfrom)
532
533
# Doctest suite: a tutorial-style walk through basic generator behavior.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...             print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
647
# The examples from PEP 255.

# Doctest suite: specification examples from PEP 255 (Simple Generators).
pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
      File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
812
813# Examples from Iterator-List and Python-Dev and c.l.py.
814
815email_tests = """
816
817The difference between yielding None and returning it.
818
819>>> def g():
820...     for i in range(3):
821...         yield None
822...     yield None
823...     return
824>>> list(g())
825[None, None, None, None]
826
827Ensure that explicitly raising StopIteration acts like any other exception
828in try/except, not like a return.
829
830>>> def g():
831...     yield 1
832...     try:
833...         raise StopIteration
834...     except:
835...         yield 2
836...     yield 3
837>>> list(g())
838[1, 2, 3]
839
840Next one was posted to c.l.py.
841
842>>> def gcomb(x, k):
843...     "Generate all combinations of k elements from list x."
844...
845...     if k > len(x):
846...         return
847...     if k == 0:
848...         yield []
849...     else:
850...         first, rest = x[0], x[1:]
851...         # A combination does or doesn't contain first.
852...         # If it does, the remainder is a k-1 comb of rest.
853...         for c in gcomb(rest, k-1):
854...             c.insert(0, first)
855...             yield c
856...         # If it doesn't contain first, it's a k comb of rest.
857...         for c in gcomb(rest, k):
858...             yield c
859
860>>> seq = list(range(1, 5))
861>>> for k in range(len(seq) + 2):
862...     print("%d-combs of %s:" % (k, seq))
863...     for c in gcomb(seq, k):
864...         print("   ", c)
8650-combs of [1, 2, 3, 4]:
866    []
8671-combs of [1, 2, 3, 4]:
868    [1]
869    [2]
870    [3]
871    [4]
8722-combs of [1, 2, 3, 4]:
873    [1, 2]
874    [1, 3]
875    [1, 4]
876    [2, 3]
877    [2, 4]
878    [3, 4]
8793-combs of [1, 2, 3, 4]:
880    [1, 2, 3]
881    [1, 2, 4]
882    [1, 3, 4]
883    [2, 3, 4]
8844-combs of [1, 2, 3, 4]:
885    [1, 2, 3, 4]
8865-combs of [1, 2, 3, 4]:
887
888From the Iterators list, about the types of these things.
889
890>>> def g():
891...     yield 1
892...
893>>> type(g)
894<class 'function'>
895>>> i = g()
896>>> type(i)
897<class 'generator'>
898>>> [s for s in dir(i) if not s.startswith('_')]
899['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw']
900>>> from test.support import HAVE_DOCSTRINGS
901>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
902Implement next(self).
903>>> iter(i) is i
904True
905>>> import types
906>>> isinstance(i, types.GeneratorType)
907True
908
909And more, added later.
910
911>>> i.gi_running
9120
913>>> type(i.gi_frame)
914<class 'frame'>
915>>> i.gi_running = 42
916Traceback (most recent call last):
917  ...
918AttributeError: attribute 'gi_running' of 'generator' objects is not writable
919>>> def g():
920...     yield me.gi_running
921>>> me = g()
922>>> me.gi_running
9230
924>>> next(me)
9251
926>>> me.gi_running
9270
928
929A clever union-find implementation from c.l.py, due to David Eppstein.
930Sent: Friday, June 29, 2001 12:16 PM
931To: python-list@python.org
932Subject: Re: PEP 255: Simple Generators
933
934>>> class disjointSet:
935...     def __init__(self, name):
936...         self.name = name
937...         self.parent = None
938...         self.generator = self.generate()
939...
940...     def generate(self):
941...         while not self.parent:
942...             yield self
943...         for x in self.parent.generator:
944...             yield x
945...
946...     def find(self):
947...         return next(self.generator)
948...
949...     def union(self, parent):
950...         if self.parent:
951...             raise ValueError("Sorry, I'm not a root!")
952...         self.parent = parent
953...
954...     def __str__(self):
955...         return self.name
956
957>>> names = "ABCDEFGHIJKLM"
958>>> sets = [disjointSet(name) for name in names]
959>>> roots = sets[:]
960
961>>> import random
962>>> gen = random.Random(42)
963>>> while 1:
964...     for s in sets:
965...         print(" %s->%s" % (s, s.find()), end='')
966...     print()
967...     if len(roots) > 1:
968...         s1 = gen.choice(roots)
969...         roots.remove(s1)
970...         s2 = gen.choice(roots)
971...         s1.union(s2)
972...         print("merged", s1, "into", s2)
973...     else:
974...         break
975 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
976merged K into B
977 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
978merged A into F
979 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
980merged E into F
981 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
982merged D into C
983 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
984merged M into C
985 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
986merged J into B
987 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
988merged B into C
989 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
990merged F into G
991 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
992merged L into C
993 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
994merged G into I
995 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
996merged I into H
997 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
998merged C into H
999 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
1000
1001"""
1002# Emacs turd '
1003
1004# Fun tests (for sufficiently warped notions of "fun").
1005
1006fun_tests = """
1007
1008Build up to a recursive Sieve of Eratosthenes generator.
1009
1010>>> def firstn(g, n):
1011...     return [next(g) for i in range(n)]
1012
1013>>> def intsfrom(i):
1014...     while 1:
1015...         yield i
1016...         i += 1
1017
1018>>> firstn(intsfrom(5), 7)
1019[5, 6, 7, 8, 9, 10, 11]
1020
1021>>> def exclude_multiples(n, ints):
1022...     for i in ints:
1023...         if i % n:
1024...             yield i
1025
1026>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
1027[1, 2, 4, 5, 7, 8]
1028
1029>>> def sieve(ints):
1030...     prime = next(ints)
1031...     yield prime
1032...     not_divisible_by_prime = exclude_multiples(prime, ints)
1033...     for p in sieve(not_divisible_by_prime):
1034...         yield p
1035
1036>>> primes = sieve(intsfrom(2))
1037>>> firstn(primes, 20)
1038[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
1039
1040
1041Another famous problem:  generate all integers of the form
1042    2**i * 3**j  * 5**k
1043in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
1044Try writing it without generators, and correctly, and without generating
10453 internal results for each result output.
1046
1047>>> def times(n, g):
1048...     for i in g:
1049...         yield n * i
1050>>> firstn(times(10, intsfrom(1)), 10)
1051[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
1052
1053>>> def merge(g, h):
1054...     ng = next(g)
1055...     nh = next(h)
1056...     while 1:
1057...         if ng < nh:
1058...             yield ng
1059...             ng = next(g)
1060...         elif ng > nh:
1061...             yield nh
1062...             nh = next(h)
1063...         else:
1064...             yield ng
1065...             ng = next(g)
1066...             nh = next(h)
1067
1068The following works, but is doing a whale of a lot of redundant work --
1069it's not clear how to get the internal uses of m235 to share a single
1070generator.  Note that me_times2 (etc) each need to see every element in the
1071result sequence.  So this is an example where lazy lists are more natural
1072(you can look at the head of a lazy list any number of times).
1073
1074>>> def m235():
1075...     yield 1
1076...     me_times2 = times(2, m235())
1077...     me_times3 = times(3, m235())
1078...     me_times5 = times(5, m235())
1079...     for i in merge(merge(me_times2,
1080...                          me_times3),
1081...                    me_times5):
1082...         yield i
1083
1084Don't print "too many" of these -- the implementation above is extremely
1085inefficient:  each call of m235() leads to 3 recursive calls, and in
1086turn each of those 3 more, and so on, and so on, until we've descended
1087enough levels to satisfy the print stmts.  Very odd:  when I printed 5
1088lines of results below, this managed to screw up Win98's malloc in "the
1089usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
1090address space, and it *looked* like a very slow leak.
1091
1092>>> result = m235()
1093>>> for i in range(3):
1094...     print(firstn(result, 15))
1095[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1096[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1097[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1098
1099Heh.  Here's one way to get a shared list, complete with an excruciating
1100namespace renaming trick.  The *pretty* part is that the times() and merge()
1101functions can be reused as-is, because they only assume their stream
1102arguments are iterable -- a LazyList is the same as a generator to times().
1103
1104>>> class LazyList:
1105...     def __init__(self, g):
1106...         self.sofar = []
1107...         self.fetch = g.__next__
1108...
1109...     def __getitem__(self, i):
1110...         sofar, fetch = self.sofar, self.fetch
1111...         while i >= len(sofar):
1112...             sofar.append(fetch())
1113...         return sofar[i]
1114
1115>>> def m235():
1116...     yield 1
1117...     # Gack:  m235 below actually refers to a LazyList.
1118...     me_times2 = times(2, m235)
1119...     me_times3 = times(3, m235)
1120...     me_times5 = times(5, m235)
1121...     for i in merge(merge(me_times2,
1122...                          me_times3),
1123...                    me_times5):
1124...         yield i
1125
1126Print as many of these as you like -- *this* implementation is memory-
1127efficient.
1128
1129>>> m235 = LazyList(m235())
1130>>> for i in range(5):
1131...     print([m235[j] for j in range(15*i, 15*(i+1))])
1132[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1133[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1134[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1135[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1136[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1137
1138Ye olde Fibonacci generator, LazyList style.
1139
1140>>> def fibgen(a, b):
1141...
1142...     def sum(g, h):
1143...         while 1:
1144...             yield next(g) + next(h)
1145...
1146...     def tail(g):
1147...         next(g)    # throw first away
1148...         for x in g:
1149...             yield x
1150...
1151...     yield a
1152...     yield b
1153...     for s in sum(iter(fib),
1154...                  tail(iter(fib))):
1155...         yield s
1156
1157>>> fib = LazyList(fibgen(1, 2))
1158>>> firstn(iter(fib), 17)
1159[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1160
1161
1162Running after your tail with itertools.tee (new in version 2.4)
1163
1164The algorithms "m235" (Hamming) and Fibonacci presented above are both
1165examples of a whole family of FP (functional programming) algorithms
1166where a function produces and returns a list while the production algorithm
1167suppose the list as already produced by recursively calling itself.
1168For these algorithms to work, they must:
1169
1170- produce at least a first element without presupposing the existence of
1171  the rest of the list
1172- produce their elements in a lazy manner
1173
1174To work efficiently, the beginning of the list must not be recomputed over
1175and over again. This is ensured in most FP languages as a built-in feature.
1176In python, we have to explicitly maintain a list of already computed results
1177and abandon genuine recursivity.
1178
1179This is what had been attempted above with the LazyList class. One problem
1180with that class is that it keeps a list of all of the generated results and
1181therefore continually grows. This partially defeats the goal of the generator
1182concept, viz. produce the results only as needed instead of producing them
1183all and thereby wasting memory.
1184
1185Thanks to itertools.tee, it is now clear "how to get the internal uses of
1186m235 to share a single generator".
1187
1188>>> from itertools import tee
1189>>> def m235():
1190...     def _m235():
1191...         yield 1
1192...         for n in merge(times(2, m2),
1193...                        merge(times(3, m3),
1194...                              times(5, m5))):
1195...             yield n
1196...     m1 = _m235()
1197...     m2, m3, m5, mRes = tee(m1, 4)
1198...     return mRes
1199
1200>>> it = m235()
1201>>> for i in range(5):
1202...     print(firstn(it, 15))
1203[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1204[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1205[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1206[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1207[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1208
1209The "tee" function does just what we want. It internally keeps a generated
1210result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the Hamming
sequence for hours with little or no increase in memory usage.
1213
1214The beauty of it is that recursive running-after-their-tail FP algorithms
1215are quite straightforwardly expressed with this Python idiom.
1216
1217Ye olde Fibonacci generator, tee style.
1218
1219>>> def fib():
1220...
1221...     def _isum(g, h):
1222...         while 1:
1223...             yield next(g) + next(h)
1224...
1225...     def _fib():
1226...         yield 1
1227...         yield 2
1228...         next(fibTail) # throw first away
1229...         for res in _isum(fibHead, fibTail):
1230...             yield res
1231...
1232...     realfib = _fib()
1233...     fibHead, fibTail, fibRes = tee(realfib, 3)
1234...     return fibRes
1235
1236>>> firstn(fib(), 17)
1237[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1238
1239"""
1240
# syntax_tests mostly provokes SyntaxErrors.  It also exercises dead-code
# ("if 0"-style) bodies, to check which function bodies count as generators.
1243
1244syntax_tests = """
1245
1246These are fine:
1247
1248>>> def f():
1249...     yield 1
1250...     return
1251
1252>>> def f():
1253...     try:
1254...         yield 1
1255...     finally:
1256...         pass
1257
1258>>> def f():
1259...     try:
1260...         try:
1261...             1//0
1262...         except ZeroDivisionError:
1263...             yield 666
1264...         except:
1265...             pass
1266...     finally:
1267...         pass
1268
1269>>> def f():
1270...     try:
1271...         try:
1272...             yield 12
1273...             1//0
1274...         except ZeroDivisionError:
1275...             yield 666
1276...         except:
1277...             try:
1278...                 x = 12
1279...             finally:
1280...                 yield 12
1281...     except:
1282...         return
1283>>> list(f())
1284[12, 666]
1285
1286>>> def f():
1287...    yield
1288>>> type(f())
1289<class 'generator'>
1290
1291
1292>>> def f():
1293...    if 0:
1294...        yield
1295>>> type(f())
1296<class 'generator'>
1297
1298
1299>>> def f():
1300...     if 0:
1301...         yield 1
1302>>> type(f())
1303<class 'generator'>
1304
1305>>> def f():
1306...    if "":
1307...        yield None
1308>>> type(f())
1309<class 'generator'>
1310
1311>>> def f():
1312...     return
1313...     try:
1314...         if x==4:
1315...             pass
1316...         elif 0:
1317...             try:
1318...                 1//0
1319...             except SyntaxError:
1320...                 pass
1321...             else:
1322...                 if 0:
1323...                     while 12:
1324...                         x += 1
1325...                         yield 2 # don't blink
1326...                         f(a, b, c, d, e)
1327...         else:
1328...             pass
1329...     except:
1330...         x = 1
1331...     return
1332>>> type(f())
1333<class 'generator'>
1334
1335>>> def f():
1336...     if 0:
1337...         def g():
1338...             yield 1
1339...
1340>>> type(f())
1341<class 'NoneType'>
1342
1343>>> def f():
1344...     if 0:
1345...         class C:
1346...             def __init__(self):
1347...                 yield 1
1348...             def f(self):
1349...                 yield 2
1350>>> type(f())
1351<class 'NoneType'>
1352
1353>>> def f():
1354...     if 0:
1355...         return
1356...     if 0:
1357...         yield 2
1358>>> type(f())
1359<class 'generator'>
1360
1361This one caused a crash (see SF bug 567538):
1362
1363>>> def f():
1364...     for i in range(3):
1365...         try:
1366...             continue
1367...         finally:
1368...             yield i
1369...
1370>>> g = f()
1371>>> print(next(g))
13720
1373>>> print(next(g))
13741
1375>>> print(next(g))
13762
1377>>> print(next(g))
1378Traceback (most recent call last):
1379StopIteration
1380
1381
1382Test the gi_code attribute
1383
1384>>> def f():
1385...     yield 5
1386...
1387>>> g = f()
1388>>> g.gi_code is f.__code__
1389True
1390>>> next(g)
13915
1392>>> next(g)
1393Traceback (most recent call last):
1394StopIteration
1395>>> g.gi_code is f.__code__
1396True
1397
1398
1399Test the __name__ attribute and the repr()
1400
1401>>> def f():
1402...    yield 5
1403...
1404>>> g = f()
1405>>> g.__name__
1406'f'
1407>>> repr(g)  # doctest: +ELLIPSIS
1408'<generator object f at ...>'
1409
1410Lambdas shouldn't have their usual return behavior.
1411
1412>>> x = lambda: (yield 1)
1413>>> list(x())
1414[1]
1415
1416>>> x = lambda: ((yield 1), (yield 2))
1417>>> list(x())
1418[1, 2]
1419"""
1420
1421# conjoin is a simple backtracking generator, named in honor of Icon's
1422# "conjunction" control structure.  Pass a list of no-argument functions
1423# that return iterable objects.  Easiest to explain by example:  assume the
1424# function list [x, y, z] is passed.  Then conjoin acts like:
1425#
1426# def g():
1427#     values = [None] * 3
1428#     for values[0] in x():
1429#         for values[1] in y():
1430#             for values[2] in z():
1431#                 yield values
1432#
1433# So some 3-lists of values *may* be generated, each time we successfully
1434# get into the innermost loop.  If an iterator fails (is exhausted) before
1435# then, it "backtracks" to get the next value from the nearest enclosing
1436# iterator (the one "to the left"), and starts all over again at the next
1437# slot (pumps a fresh iterator).  Of course this is most useful when the
1438# iterators have side-effects, so that which values *can* be generated at
1439# each slot depend on the values iterated at previous slots.
1440
def simple_conjoin(gs):
    """Backtracking cross-product generator, straightforward version.

    gs is a list of no-argument callables, each returning an iterable.
    Yields one list holding the value currently chosen from each
    iterable, covering every combination via backtracking.  NOTE:  the
    *same* list object is yielded every time, so a caller that wants to
    keep a result must copy it.
    """
    slots = [None] * len(gs)

    def descend(depth):
        # Every slot is bound:  emit the shared result list.
        if depth >= len(gs):
            yield slots
            return
        # Bind this slot to each candidate value in turn, recursing to
        # fill the remaining slots.
        for slots[depth] in gs[depth]():
            yield from descend(depth + 1)

    yield from descend(0)
1455
1456# That works fine, but recursing a level and checking i against len(gs) for
1457# each item produced is inefficient.  By doing manual loop unrolling across
1458# generator boundaries, it's possible to eliminate most of that overhead.
1459# This isn't worth the bother *in general* for generators, but conjoin() is
1460# a core building block for some CPU-intensive generator applications.
1461
def conjoin(gs):
    """Backtracking cross-product generator, with the recursion manually
    unrolled three levels at a time for speed.

    Same contract as simple_conjoin():  gs is a list of no-argument
    callables returning iterables, and the single shared result list is
    yielded for every combination (copy it if you need to keep it).
    """
    total = len(gs)
    slots = [None] * total

    # Recurse one level at a time until the number of loop nests still
    # to run is a multiple of 3, then hand off to the unrolled helper.
    def peel(i):
        if i >= total:
            yield slots
        elif (total - i) % 3:
            nxt = i + 1
            for slots[i] in gs[i]():
                yield from peel(nxt)
        else:
            yield from triple(i)

    # Run three nested loops at once, recursing only if at least three
    # more levels remain.  Internal helper:  only peel() may call this.
    def triple(i):
        assert i < total and (total - i) % 3 == 0
        j, k, stop = i + 1, i + 2, i + 3
        ga, gb, gc = gs[i:stop]

        if stop >= total:
            # These are the last three levels, so yield directly.
            for slots[i] in ga():
                for slots[j] in gb():
                    for slots[k] in gc():
                        yield slots
        else:
            # At least six levels remain:  peel off three and recurse
            # for the rest.
            for slots[i] in ga():
                for slots[j] in gb():
                    for slots[k] in gc():
                        yield from triple(stop)

    yield from peel(0)
1511
1512# And one more approach:  For backtracking apps like the Knight's Tour
1513# solver below, the number of backtracking levels can be enormous (one
1514# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1515# needs 10,000 levels).  In such cases Python is likely to run out of
1516# stack space due to recursion.  So here's a recursion-free version of
1517# conjoin too.
1518# NOTE WELL:  This allows large problems to be solved with only trivial
1519# demands on stack space.  Without explicitly resumable generators, this is
1520# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1521# than the fancy unrolled recursive conjoin.
1522
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Recursion-free version of conjoin().

    Same contract as conjoin():  gs is a list of no-argument callables
    returning iterables, and the single shared `values` list is yielded
    for each combination.  Keeps an explicit list of live iterators
    instead of nesting generators, so the number of backtracking levels
    is not limited by Python's recursion limit.
    """
    n = len(gs)
    values = [None] * n
    iters  = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend:  pump a fresh iterator into each slot from i onward,
        # recording its first value.
        try:
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            # Some slot's iterable was empty; fall through to backtrack.
            pass
        else:
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # No level could be resumed:  every combination has been
            # produced, so the generator is done.
            assert i < 0
            break
1556
1557# A conjoin-based N-Queens solver.
1558
class Queens:
    """N-Queens solver built on the conjoin() backtracking driver."""

    def __init__(self, n):
        self.n = n
        cols = range(n)

        # Give every column and diagonal its own bit in one big int:
        #   columns:  n bits, ordinal j.
        #   NW-SE diagonals:  2n-1 of them; i-j is invariant along each,
        #       smallest i-j is 0-(n-1) = 1-n, so shift by n-1 to be
        #       0-based.
        #   NE-SW diagonals:  2n-1 of them; i+j is invariant along each,
        #       ranging from 0 to 2n-2.
        # For each square, precompute the mask of the column and both
        # diagonals it covers; for each row, build a generator of the
        # columns not attacked by queens already placed.
        self.rowgenerators = []
        for i in cols:
            masks = [(1 << j) |                    # column bit
                     (1 << (n + i-j + n-1)) |      # NW-SE diagonal bit
                     (1 << (n + 2*n-1 + i+j))      # NE-SW diagonal bit
                     for j in cols]

            def rowgen(masks=masks):
                # Yield each open column of this row, marking its bits
                # used while deeper rows are being explored and clearing
                # them again on backtrack.
                for j in cols:
                    bits = masks[j]
                    if not bits & self.used:
                        self.used |= bits
                        yield j
                        self.used &= ~bits

            self.rowgenerators.append(rowgen)

    def solve(self):
        """Generate solutions; each is a list mapping row -> column."""
        self.used = 0
        yield from conjoin(self.rowgenerators)

    def printsolution(self, row2col):
        """Pretty-print one solution as an ASCII board."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            row = [" "] * n
            row[row2col[i]] = "Q"
            print("|" + "|".join(row) + "|")
            print(sep)
1608
1609# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1610# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1611# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1612# creating 10s of thousands of generators then!), and is lengthy.
1613
class Knights:
    """Knight's Tour solver (closed tours) built on conjoin().

    Builds one move-generator per board square; conjoin() then
    backtracks through them.  Pass hard=1 to use the stronger (but
    slower) successor-ordering heuristic for moves 3 .. m*n-1.
    """

    def __init__(self, m, n, hard=0):
        # m x n board; squares are numbered 0 .. m*n-1 (row-major, see
        # coords2index()).
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.
        # Exact inverse of remove_from_successors(); the two must always
        # be paired so succs is restored on backtrack.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 through m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 through m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        # One generator per move:  first, second, m*n-3 middle moves,
        # then the forced last move back toward the corner.
        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map board coordinates (row i, column j) to a square number."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Inverse of coords2index():  square number -> (row, column)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs:  succs[i] lists the squares a knight on
        square i can jump to."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        # The eight knight-move offsets.
        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Generate closed tours; each is a list of square numbers in
        visit order (the same list object each time -- copy to keep)."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Pretty-print one tour as an ASCII board of visit numbers."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        # NOTE:  `format` shadows the builtin name; harmless locally.
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
            print(sep)
1804
1805conjoin_tests = """
1806
1807Generate the 3-bit binary numbers in order.  This illustrates dumbest-
1808possible use of conjoin, just to generate the full cross-product.
1809
1810>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1811...     print(c)
1812[0, 0, 0]
1813[0, 0, 1]
1814[0, 1, 0]
1815[0, 1, 1]
1816[1, 0, 0]
1817[1, 0, 1]
1818[1, 1, 0]
1819[1, 1, 1]
1820
1821For efficiency in typical backtracking apps, conjoin() yields the same list
1822object each time.  So if you want to save away a full account of its
1823generated sequence, you need to copy its results.
1824
1825>>> def gencopy(iterator):
1826...     for x in iterator:
1827...         yield x[:]
1828
1829>>> for n in range(10):
1830...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1831...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
18320 1 True True
18331 2 True True
18342 4 True True
18353 8 True True
18364 16 True True
18375 32 True True
18386 64 True True
18397 128 True True
18408 256 True True
18419 512 True True
1842
1843And run an 8-queens solver.
1844
1845>>> q = Queens(8)
1846>>> LIMIT = 2
1847>>> count = 0
1848>>> for row2col in q.solve():
1849...     count += 1
1850...     if count <= LIMIT:
1851...         print("Solution", count)
1852...         q.printsolution(row2col)
1853Solution 1
1854+-+-+-+-+-+-+-+-+
1855|Q| | | | | | | |
1856+-+-+-+-+-+-+-+-+
1857| | | | |Q| | | |
1858+-+-+-+-+-+-+-+-+
1859| | | | | | | |Q|
1860+-+-+-+-+-+-+-+-+
1861| | | | | |Q| | |
1862+-+-+-+-+-+-+-+-+
1863| | |Q| | | | | |
1864+-+-+-+-+-+-+-+-+
1865| | | | | | |Q| |
1866+-+-+-+-+-+-+-+-+
1867| |Q| | | | | | |
1868+-+-+-+-+-+-+-+-+
1869| | | |Q| | | | |
1870+-+-+-+-+-+-+-+-+
1871Solution 2
1872+-+-+-+-+-+-+-+-+
1873|Q| | | | | | | |
1874+-+-+-+-+-+-+-+-+
1875| | | | | |Q| | |
1876+-+-+-+-+-+-+-+-+
1877| | | | | | | |Q|
1878+-+-+-+-+-+-+-+-+
1879| | |Q| | | | | |
1880+-+-+-+-+-+-+-+-+
1881| | | | | | |Q| |
1882+-+-+-+-+-+-+-+-+
1883| | | |Q| | | | |
1884+-+-+-+-+-+-+-+-+
1885| |Q| | | | | | |
1886+-+-+-+-+-+-+-+-+
1887| | | | |Q| | | |
1888+-+-+-+-+-+-+-+-+
1889
1890>>> print(count, "solutions in all.")
189192 solutions in all.
1892
1893And run a Knight's Tour on a 10x10 board.  Note that there are about
189420,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1895
1896>>> k = Knights(10, 10)
1897>>> LIMIT = 2
1898>>> count = 0
1899>>> for x in k.solve():
1900...     count += 1
1901...     if count <= LIMIT:
1902...         print("Solution", count)
1903...         k.printsolution(x)
1904...     else:
1905...         break
1906Solution 1
1907+---+---+---+---+---+---+---+---+---+---+
1908|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1909+---+---+---+---+---+---+---+---+---+---+
1910| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1911+---+---+---+---+---+---+---+---+---+---+
1912| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1913+---+---+---+---+---+---+---+---+---+---+
1914| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1915+---+---+---+---+---+---+---+---+---+---+
1916| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1917+---+---+---+---+---+---+---+---+---+---+
1918| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1919+---+---+---+---+---+---+---+---+---+---+
1920| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1921+---+---+---+---+---+---+---+---+---+---+
1922| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1923+---+---+---+---+---+---+---+---+---+---+
1924| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1925+---+---+---+---+---+---+---+---+---+---+
1926| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1927+---+---+---+---+---+---+---+---+---+---+
1928Solution 2
1929+---+---+---+---+---+---+---+---+---+---+
1930|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1931+---+---+---+---+---+---+---+---+---+---+
1932| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1933+---+---+---+---+---+---+---+---+---+---+
1934| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1935+---+---+---+---+---+---+---+---+---+---+
1936| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1937+---+---+---+---+---+---+---+---+---+---+
1938| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1939+---+---+---+---+---+---+---+---+---+---+
1940| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1941+---+---+---+---+---+---+---+---+---+---+
1942| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
1943+---+---+---+---+---+---+---+---+---+---+
1944| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
1945+---+---+---+---+---+---+---+---+---+---+
1946| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
1947+---+---+---+---+---+---+---+---+---+---+
1948| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
1949+---+---+---+---+---+---+---+---+---+---+
1950"""
1951
# Doctest text:  both generator functions and generator-iterators must be
# weakly referencable, and usable through weakref proxies.
weakref_tests = """\
Generators are weakly referencable:

>>> import weakref
>>> def gen():
...     yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)

Generator-iterators are weakly referencable as well:

>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']

"""
1975
# Doctests exercising generators used as coroutines: send(), throw(), close(),
# finalization order, GeneratorExit semantics, and the syntax rules for yield
# expressions.  The expected tracebacks/messages below are exact doctest
# output — do not edit the string contents without re-running the suite.
coroutine_tests = """\
>>> from test.support import gc_collect

Sending a value into a started generator:

>>> def f():
...     print((yield 1))
...     yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2

Sending a value into a new generator produces a TypeError:

>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator


Yield by itself yields None:

>>> def f(): yield
>>> list(f())
[None]


Yield is allowed only in the outermost iterable in generator expression:

>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>


A yield expression with augmented assignment.

>>> def coroutine(seq):
...     count = 0
...     while count < 200:
...         count += yield
...         seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]


Check some syntax errors for yield expressions:

>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
  ...
SyntaxError: 'yield' outside function

# Pegen does not produce this error message yet
# >>> def f(): x = yield = y
# Traceback (most recent call last):
#   ...
# SyntaxError: assignment to yield expression not possible

>>> def f(): (yield bar) = y
Traceback (most recent call last):
  ...
SyntaxError: cannot assign to yield expression here. Maybe you meant '==' instead of '='?

>>> def f(): (yield bar) += y
Traceback (most recent call last):
  ...
SyntaxError: 'yield expression' is an illegal expression for augmented assignment


Now check some throw() conditions:

>>> def f():
...     while True:
...         try:
...             print((yield))
...         except ValueError as v:
...             print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)

>>> g.throw(ValueError) # type only
caught ValueError ()

>>> g.throw(ValueError("xyz"))  # value only
caught ValueError (xyz)

>>> g.throw(ValueError, ValueError(1))   # value+matching type
caught ValueError (1)

>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
caught ValueError (1)

>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
caught ValueError (1)

>>> g.throw(ValueError(1), "foo")       # bad args
Traceback (most recent call last):
  ...
TypeError: instance exception may not have a separate value

>>> g.throw(ValueError, "foo", 23)      # bad args
Traceback (most recent call last):
  ...
TypeError: throw() third argument must be a traceback object

>>> g.throw("abc")
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not str

>>> g.throw(0)
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not int

>>> g.throw(list)
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not type

>>> def throw(g,exc):
...     try:
...         raise exc
...     except:
...         g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()

>>> g.send(1)
1

>>> throw(g,TypeError)  # terminate the generator
Traceback (most recent call last):
  ...
TypeError

>>> print(g.gi_frame)
None

>>> g.send(2)
Traceback (most recent call last):
  ...
StopIteration

>>> g.throw(ValueError,6)       # throw on closed generator
Traceback (most recent call last):
  ...
ValueError: 6

>>> f().throw(ValueError,7)     # throw on just-opened generator
Traceback (most recent call last):
  ...
ValueError: 7

Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0

>>> def f():
...     try:
...         yield
...     except:
...         raise
>>> g = f()
>>> try:
...     1/0
... except ZeroDivisionError as v:
...     try:
...         g.throw(v)
...     except Exception as w:
...         tb = w.__traceback__
>>> levels = 0
>>> while tb:
...     levels += 1
...     tb = tb.tb_next
>>> levels
3

Now let's try closing a generator:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close()  # should be no-op now

>>> f().close()  # close on just-opened generator should be fine

>>> def f(): yield      # an even simpler generator
>>> f().close()         # close before opening
>>> g = f()
>>> next(g)
>>> g.close()           # close normally

And finalization:

>>> def f():
...     try: yield
...     finally:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> del g; gc_collect()  # For PyPy or other GCs.
exiting


GeneratorExit is not caught by except Exception:

>>> def f():
...     try: yield
...     except Exception:
...         print('except')
...     finally:
...         print('finally')

>>> g = f()
>>> next(g)
>>> del g; gc_collect()  # For PyPy or other GCs.
finally


Now let's try some ill-behaved generators:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
  ...
RuntimeError: generator ignored GeneratorExit
>>> g.close()


Our ill-behaved code should be invoked during GC:

>>> with support.catch_unraisable_exception() as cm:
...     g = f()
...     next(g)
...     del g
...
...     cm.unraisable.exc_type == RuntimeError
...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
...     cm.unraisable.exc_traceback is not None
True
True
True

And errors thrown during closing should propagate:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
  ...
TypeError: fie!


Ensure that various yield expression constructs make their
enclosing function a generator:

>>> def f(): x += yield
>>> type(f())
<class 'generator'>

>>> def f(): x = yield
>>> type(f())
<class 'generator'>

>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>

>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]

"""
2298
# Regression doctests for historical reference leaks: a self-referential
# itertools.tee cycle, a generator caught in its own closure cycle, and an
# exception raised from __del__ (checked via catch_unraisable_exception).
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> from test import support
>>> class Leaker:
...     def __del__(self):
...         def invoke(message):
...             raise RuntimeError(message)
...         invoke("del failed")
...
>>> with support.catch_unraisable_exception() as cm:
...     l = Leaker()
...     del l
...
...     cm.unraisable.object == Leaker.__del__
...     cm.unraisable.exc_type == RuntimeError
...     str(cm.unraisable.exc_value) == "del failed"
...     cm.unraisable.exc_traceback is not None
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""
2370
# Registry of doctest suites for this module.  doctest.testmod() (invoked via
# support.run_doctest in test_main) picks up every entry in __test__ in
# addition to docstrings found in the module itself.
__test__ = dict(
    tut=tutorial_tests,
    pep=pep_tests,
    email=email_tests,
    fun=fun_tests,
    syntax=syntax_tests,
    conjoin=conjoin_tests,
    weakref=weakref_tests,
    coroutine=coroutine_tests,
    refleaks=refleaks_tests,
)
2381
2382# Magic test name that regrtest.py invokes *after* importing this module.
2383# This worms around a bootstrap problem.
2384# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
2385# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run this module's unittest classes, then its doctest suites.

    *verbose* is forwarded to support.run_doctest (doctest and regrtest
    both also honor a "-v" in sys.argv).
    """
    from test import support
    from test import test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)
2390
2391# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    # Running the file directly always uses verbose mode.
    test_main(verbose=1)
2394