• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1import copy
2import gc
3import pickle
4import sys
5import doctest
6import unittest
7import weakref
8import inspect
9import types
10
11from test import support
12
13try:
14    import _testcapi
15except ImportError:
16    _testcapi = None
17
18
# This tests to make sure that if a SIGINT arrives just before we send into a
# yield from chain, the KeyboardInterrupt is raised in the innermost
# generator (see bpo-30039).
@unittest.skipUnless(_testcapi is not None and
                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
                     "needs _testcapi.raise_SIGINT_then_send_None")
class SignalAndYieldFromTest(unittest.TestCase):
    """Check that a SIGINT arriving just before resuming a ``yield from``
    chain is raised in the innermost generator (bpo-30039)."""

    def generator1(self):
        # Outer generator: delegates to generator2 and returns its result.
        return (yield from self.generator2())

    def generator2(self):
        # Innermost generator: reports whether KeyboardInterrupt reached it.
        try:
            yield
        except KeyboardInterrupt:
            return "PASSED"
        else:
            return "FAILED"

    def test_raise_and_yield_from(self):
        gen = self.generator1()
        gen.send(None)
        # Initialize so that, if no exception is raised at all, the assertion
        # below fails cleanly instead of dying with UnboundLocalError.
        exc = None
        try:
            _testcapi.raise_SIGINT_then_send_None(gen)
        except BaseException as _exc:
            exc = _exc
        # generator2 must have caught KeyboardInterrupt and returned
        # "PASSED", which surfaces here as StopIteration("PASSED").
        self.assertIs(type(exc), StopIteration)
        self.assertEqual(exc.value, "PASSED")
47
48
class FinalizationTest(unittest.TestCase):
    """Tests around generator finalization, frame lifetime and reference
    cycles."""

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Runs during finalization of the generator; storing the
                # frame in the enclosing scope resurrects it.
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object itself is gone ...
        self.assertIs(wr(), None)
        # ... but its frame survived through the nonlocal reference.
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        g.send(g)  # sends the generator into itself, creating a cycle
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # The cycle collector still ran the finally block ...
        self.assertTrue(finalized)
        # ... without adding anything to gc.garbage.
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        # bpo-23192, gh-119897: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        self.assertIsInstance(f(), types.GeneratorType)
        self.assertEqual(next(f()), 1)

        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            # The value sent in becomes the generator's return value.
            self.assertEqual(cm.exception.value, 2)
114
115
class GeneratorTest(unittest.TestCase):
    """Miscellaneous behaviour of generator objects."""

    def test_name(self):
        def func():
            yield 1

        # check generator names
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # modify generator names
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # generator names must be a string and cannot be deleted
        self.assertRaises(TypeError, setattr, gen, '__name__', 123)
        self.assertRaises(TypeError, setattr, gen, '__qualname__', 123)
        self.assertRaises(TypeError, delattr, gen, '__name__')
        self.assertRaises(TypeError, delattr, gen, '__qualname__')

        # modify names of the function creating the generator
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")

        # unnamed generator
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__,
                         "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        # Generators cannot be copied.
        def f():
            yield 1
        g = f()
        with self.assertRaises(TypeError):
            copy.copy(g)

    def test_pickle(self):
        # Generators cannot be pickled, with any protocol.
        def f():
            yield 1
        g = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(g, proto)

    def test_send_non_none_to_new_gen(self):
        # A just-created generator only accepts send(None).
        def f():
            yield 1
        g = f()
        with self.assertRaises(TypeError):
            g.send(0)
        self.assertEqual(next(g), 1)

    def test_handle_frame_object_in_creation(self):

        #Attempt to expose partially constructed frames
        #See https://github.com/python/cpython/issues/94262

        def cb(*args):
            inspect.stack()

        def gen():
            yield 1

        thresholds = gc.get_threshold()

        # Force collections (with a stack-walking callback installed) on
        # practically every allocation while the generator is created.
        gc.callbacks.append(cb)
        gc.set_threshold(1, 0, 0)
        try:
            gen()
        finally:
            gc.set_threshold(*thresholds)
            gc.callbacks.pop()

        # Same idea, but the stack walk happens from a destructor that runs
        # during collection of a reference cycle.
        class Sneaky:
            def __del__(self):
                inspect.stack()

        sneaky = Sneaky()
        sneaky._s = Sneaky()
        sneaky._s._s = sneaky

        gc.set_threshold(1, 0, 0)
        try:
            del sneaky
            gen()
        finally:
            gc.set_threshold(*thresholds)

    def test_ag_frame_f_back(self):
        # An async generator's suspended frame has no caller.
        async def f():
            yield
        ag = f()
        self.assertIsNone(ag.ag_frame.f_back)

    def test_cr_frame_f_back(self):
        # A coroutine's frame has no caller before it is awaited.
        async def f():
            pass
        cr = f()
        self.assertIsNone(cr.cr_frame.f_back)
        cr.close()  # Suppress RuntimeWarning.

    def test_gi_frame_f_back(self):
        # A plain generator's suspended frame has no caller.
        def f():
            yield
        gi = f()
        self.assertIsNone(gi.gi_frame.f_back)

    def test_issue103488(self):
        # gh-103488: an exception raised by the generator while the for-loop
        # is still formally in progress must reach the enclosing handler.

        def gen_raises():
            yield
            raise ValueError()

        def loop():
            try:
                for _ in gen_raises():
                    if True is False:
                        return
            except ValueError:
                pass

        #This should not raise
        loop()
248
249
class ModifyUnderlyingIterableTest(unittest.TestCase):
    """Replacing the object a running generator iterates over (through
    f_locals, or by rebuilding a generator from gi_code) must behave exactly
    like iterating the replacement directly."""

    # Sample iterables of assorted built-in types.
    iterables = [
        range(0),
        range(20),
        [1, 2, 3],
        (2,),
        {13, 48, 211},
        frozenset((15, 8, 6)),
        {1: 2, 3: 4},
    ]

    # Objects whose iteration must fail with TypeError.
    non_iterables = [
        None,
        42,
        3.0,
        2j,
    ]

    def genexpr(self):
        # Generator from a genexpr; its iterator lives in the hidden
        # local named '.0'.
        return (x for x in range(10))

    def genfunc(self):
        # Generator from an ordinary generator function; its iterable
        # lives in the local named 'it'.
        def gen(it):
            for x in it:
                yield x
        return gen(range(10))

    def process_tests(self, get_generator):
        # get_generator(obj) must return a generator driven by ``obj``.
        for obj in self.iterables:
            g_obj = get_generator(obj)
            with self.subTest(g_obj=g_obj, obj=obj):
                self.assertListEqual(list(g_obj), list(obj))

            g_iter = get_generator(iter(obj))
            with self.subTest(g_iter=g_iter, obj=obj):
                self.assertListEqual(list(g_iter), list(obj))

        err_regex = "'.*' object is not iterable"
        for obj in self.non_iterables:
            g_obj = get_generator(obj)
            with self.subTest(g_obj=g_obj):
                self.assertRaisesRegex(TypeError, err_regex, list, g_obj)

    def test_modify_f_locals(self):
        # Swap the iterable through the generator frame's f_locals.
        def modify_f_locals(g, local, obj):
            g.gi_frame.f_locals[local] = obj
            return g

        def get_generator_genexpr(obj):
            return modify_f_locals(self.genexpr(), '.0', obj)

        def get_generator_genfunc(obj):
            return modify_f_locals(self.genfunc(), 'it', obj)

        self.process_tests(get_generator_genexpr)
        self.process_tests(get_generator_genfunc)

    def test_new_gen_from_gi_code(self):
        # Build a fresh generator function around an existing generator's
        # code object and call it with the replacement iterable.
        def new_gen_from_gi_code(g, obj):
            generator_func = types.FunctionType(g.gi_code, {})
            return generator_func(obj)

        def get_generator_genexpr(obj):
            return new_gen_from_gi_code(self.genexpr(), obj)

        def get_generator_genfunc(obj):
            return new_gen_from_gi_code(self.genfunc(), obj)

        self.process_tests(get_generator_genexpr)
        self.process_tests(get_generator_genfunc)
320
321
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().

    def test_except_throw(self):
        def store_raise_exc_generator():
            try:
                self.assertIsNone(sys.exception())
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertIsInstance(sys.exception(), ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertIsInstance(sys.exception(), ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        self.assertIsNone(sys.exception())

    def test_except_next(self):
        def gen():
            # Resumed from inside an ``except`` block, so the caller's
            # exception must be visible here.
            self.assertIsInstance(sys.exception(), ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertIsNone(sys.exception())

    def test_except_gen_except(self):
        def gen():
            try:
                self.assertIsNone(sys.exception())
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertIsInstance(sys.exception(), TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertIsInstance(sys.exception(), ValueError)
            yield
            self.assertIsNone(sys.exception())
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertIsNone(sys.exception())

    def test_nested_gen_except_loop(self):
        def gen():
            # The inner generator keeps seeing the TypeError raised in
            # outer(), not the caller's ValueError.
            for i in range(100):
                self.assertIsInstance(sys.exception(), TypeError)
                yield "doing"

        def outer():
            try:
                raise TypeError
            except:
                for x in gen():
                    yield x

        try:
            raise ValueError
        except Exception:
            for x in outer():
                self.assertEqual(x, "doing")
        self.assertEqual(sys.exception(), None)

    def test_except_throw_exception_context(self):
        def gen():
            try:
                try:
                    self.assertIsNone(sys.exception())
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertIsInstance(sys.exception(), ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertIsInstance(sys.exception(), TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertIsInstance(sys.exception(), ValueError)
            yield
            self.assertIsNone(sys.exception())
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertIsNone(sys.exception())

    def test_except_throw_bad_exception(self):
        # E() "instantiates" to the class object itself (not a BaseException
        # instance), which gen.throw() must reject.
        class E(Exception):
            def __new__(cls, *args, **kwargs):
                return cls

        def boring_generator():
            yield

        gen = boring_generator()

        err_msg = 'should have returned an instance of BaseException'

        with self.assertRaisesRegex(TypeError, err_msg):
            gen.throw(E)

        self.assertRaises(StopIteration, next, gen)

        def generator():
            with self.assertRaisesRegex(TypeError, err_msg):
                yield

        gen = generator()
        next(gen)
        with self.assertRaises(StopIteration):
            gen.throw(E)

    def test_gen_3_arg_deprecation_warning(self):
        # The three-argument form of throw() is deprecated.
        def g():
            yield 42

        gen = g()
        with self.assertWarns(DeprecationWarning):
            with self.assertRaises(TypeError):
                gen.throw(TypeError, TypeError(24), None)

    def test_stopiteration_error(self):
        # See also PEP 479.

        def gen():
            raise StopIteration
            yield

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(gen())

    def test_tutorial_stopiteration(self):
        # Raising StopIteration stops the generator too:

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(g)

    def test_return_tuple(self):
        # A returned tuple is delivered intact as StopIteration.value, not
        # unpacked into multiple arguments.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        # Returning a StopIteration instance wraps it: it becomes the
        # ``value`` of the StopIteration seen by the caller.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
529
530
531class GeneratorCloseTest(unittest.TestCase):
532
533    def test_close_no_return_value(self):
534        def f():
535            yield
536
537        gen = f()
538        gen.send(None)
539        self.assertIsNone(gen.close())
540
541    def test_close_return_value(self):
542        def f():
543            try:
544                yield
545                # close() raises GeneratorExit here, which is caught
546            except GeneratorExit:
547                return 0
548
549        gen = f()
550        gen.send(None)
551        self.assertEqual(gen.close(), 0)
552
553    def test_close_not_catching_exit(self):
554        def f():
555            yield
556            # close() raises GeneratorExit here, which isn't caught and
557            # therefore propagates -- no return value
558            return 0
559
560        gen = f()
561        gen.send(None)
562        self.assertIsNone(gen.close())
563
564    def test_close_not_started(self):
565        def f():
566            try:
567                yield
568            except GeneratorExit:
569                return 0
570
571        gen = f()
572        self.assertIsNone(gen.close())
573
574    def test_close_exhausted(self):
575        def f():
576            try:
577                yield
578            except GeneratorExit:
579                return 0
580
581        gen = f()
582        next(gen)
583        with self.assertRaises(StopIteration):
584            next(gen)
585        self.assertIsNone(gen.close())
586
587    def test_close_closed(self):
588        def f():
589            try:
590                yield
591            except GeneratorExit:
592                return 0
593
594        gen = f()
595        gen.send(None)
596        self.assertEqual(gen.close(), 0)
597        self.assertIsNone(gen.close())
598
599    def test_close_raises(self):
600        def f():
601            try:
602                yield
603            except GeneratorExit:
604                pass
605            raise RuntimeError
606
607        gen = f()
608        gen.send(None)
609        with self.assertRaises(RuntimeError):
610            gen.close()
611
612    def test_close_releases_frame_locals(self):
613        # See gh-118272
614
615        class Foo:
616            pass
617
618        f = Foo()
619        f_wr = weakref.ref(f)
620
621        def genfn():
622            a = f
623            yield
624
625        g = genfn()
626        next(g)
627        del f
628        g.close()
629        support.gc_collect()
630        self.assertIsNone(f_wr())
631
632
class GeneratorThrowTest(unittest.TestCase):
    """Exception-context (``__context__``) behaviour of generator.throw()."""

    def test_exception_context_with_yield(self):
        # An exception thrown into a generator suspended inside an
        # ``except`` block chains to the exception being handled there.
        def f():
            try:
                raise KeyError('a')
            except Exception:
                yield

        gen = f()
        gen.send(None)
        with self.assertRaises(ValueError) as cm:
            gen.throw(ValueError)
        context = cm.exception.__context__
        self.assertEqual((type(context), context.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_inside_generator(self):
        # Check that the context is also available from inside the generator
        # with yield, as opposed to outside.
        def f():
            try:
                raise KeyError('a')
            except Exception:
                try:
                    yield
                except Exception as exc:
                    self.assertEqual(type(exc), ValueError)
                    context = exc.__context__
                    self.assertEqual((type(context), context.args),
                        (KeyError, ('a',)))
                    yield 'b'

        gen = f()
        gen.send(None)
        actual = gen.throw(ValueError)
        # This ensures that the assertions inside were executed.
        self.assertEqual(actual, 'b')

    def test_exception_context_with_yield_from(self):
        # The context is preserved across a ``yield from`` delegation.
        def f():
            yield

        def g():
            try:
                raise KeyError('a')
            except Exception:
                yield from f()

        gen = g()
        gen.send(None)
        with self.assertRaises(ValueError) as cm:
            gen.throw(ValueError)
        context = cm.exception.__context__
        self.assertEqual((type(context), context.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_from_with_context_cycle(self):
        # Check trying to create an exception context cycle:
        # https://bugs.python.org/issue40696
        has_cycle = None

        def f():
            yield

        def g(exc):
            nonlocal has_cycle
            try:
                raise exc
            except Exception:
                try:
                    yield from f()
                except Exception as exc:
                    has_cycle = (exc is exc.__context__)
            yield

        exc = KeyError('a')
        gen = g(exc)
        gen.send(None)
        gen.throw(exc)
        # This also distinguishes from the initial has_cycle=None.
        self.assertEqual(has_cycle, False)

    def test_throw_after_none_exc_type(self):
        # Throwing into a generator whose handled-exception state is None
        # (the earlier KeyError handler has fully exited) must not
        # resurrect the old exception.
        def g():
            try:
                raise KeyError
            except KeyError:
                pass

            try:
                yield
            except Exception:
                raise RuntimeError

        gen = g()
        gen.send(None)
        # The caught exception isn't used, so don't bind it to a name.
        with self.assertRaises(RuntimeError):
            gen.throw(ValueError)
730
731
class GeneratorStackTraceTest(unittest.TestCase):
    """Verify the f_back chains seen from inside plain and delegating
    generators during send() and throw()."""

    def check_stack_names(self, frame, expected):
        # Walk outward from ``frame``, collecting code names until we hit
        # one of our own helpers (anything named check_* or call_*).
        seen = []
        current = frame
        while current is not None:
            name = current.f_code.co_name
            if name.startswith(('check_', 'call_')):
                break
            seen.append(name)
            current = current.f_back

        self.assertEqual(seen, expected)

    def check_yield_from_example(self, call_method):
        def f():
            # While f() runs under ``yield from``, both f and g are live.
            self.check_stack_names(sys._getframe(), ['f', 'g'])
            try:
                yield
            except Exception:
                pass
            self.check_stack_names(sys._getframe(), ['f', 'g'])

        def g():
            self.check_stack_names(sys._getframe(), ['g'])
            yield from f()
            self.check_stack_names(sys._getframe(), ['g'])

        gen = g()
        gen.send(None)
        try:
            call_method(gen)
        except StopIteration:
            pass

    def test_send_with_yield_from(self):
        def call_send(gen):
            gen.send(None)

        self.check_yield_from_example(call_send)

    def test_throw_with_yield_from(self):
        def call_throw(gen):
            gen.throw(RuntimeError)

        self.check_yield_from_example(call_throw)
779
780
class YieldFromTests(unittest.TestCase):
    """gi_yieldfrom is set only while a generator is suspended inside a
    ``yield from`` expression."""

    def test_generator_gi_yieldfrom(self):
        def a():
            # While a() is executing, the outer generator is RUNNING and
            # not observably delegating.
            self.assertEqual(inspect.getgeneratorstate(outer_gen),
                             inspect.GEN_RUNNING)
            self.assertIsNone(outer_gen.gi_yieldfrom)
            yield
            self.assertEqual(inspect.getgeneratorstate(outer_gen),
                             inspect.GEN_RUNNING)
            self.assertIsNone(outer_gen.gi_yieldfrom)

        def b():
            self.assertIsNone(outer_gen.gi_yieldfrom)
            yield from a()
            self.assertIsNone(outer_gen.gi_yieldfrom)
            yield
            self.assertIsNone(outer_gen.gi_yieldfrom)

        outer_gen = b()
        # Freshly created: not started, not delegating.
        self.assertEqual(inspect.getgeneratorstate(outer_gen),
                         inspect.GEN_CREATED)
        self.assertIsNone(outer_gen.gi_yieldfrom)

        outer_gen.send(None)
        # Suspended inside ``yield from a()``: gi_yieldfrom exposes a().
        self.assertEqual(inspect.getgeneratorstate(outer_gen),
                         inspect.GEN_SUSPENDED)
        self.assertEqual(outer_gen.gi_yieldfrom.gi_code.co_name, 'a')

        outer_gen.send(None)
        # Suspended at the plain ``yield``: no delegation in progress.
        self.assertEqual(inspect.getgeneratorstate(outer_gen),
                         inspect.GEN_SUSPENDED)
        self.assertIsNone(outer_gen.gi_yieldfrom)

        # Exhaust the generator; a closed generator never delegates.
        self.assertEqual(list(outer_gen), [])
        self.assertEqual(inspect.getgeneratorstate(outer_gen),
                         inspect.GEN_CLOSED)
        self.assertIsNone(outer_gen.gi_yieldfrom)
812
813
# Doctest text walking through basic generator behaviour: iteration,
# exhaustion, "return" vs StopIteration, and generators calling generators.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...             print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
927
# The examples from PEP 255 (Simple Generators), kept as doctest text.
pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
      File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
1092
1093# Examples from Iterator-List and Python-Dev and c.l.py.
1094
1095email_tests = """
1096
1097The difference between yielding None and returning it.
1098
1099>>> def g():
1100...     for i in range(3):
1101...         yield None
1102...     yield None
1103...     return
1104>>> list(g())
1105[None, None, None, None]
1106
1107Ensure that explicitly raising StopIteration acts like any other exception
1108in try/except, not like a return.
1109
1110>>> def g():
1111...     yield 1
1112...     try:
1113...         raise StopIteration
1114...     except:
1115...         yield 2
1116...     yield 3
1117>>> list(g())
1118[1, 2, 3]
1119
1120Next one was posted to c.l.py.
1121
1122>>> def gcomb(x, k):
1123...     "Generate all combinations of k elements from list x."
1124...
1125...     if k > len(x):
1126...         return
1127...     if k == 0:
1128...         yield []
1129...     else:
1130...         first, rest = x[0], x[1:]
1131...         # A combination does or doesn't contain first.
1132...         # If it does, the remainder is a k-1 comb of rest.
1133...         for c in gcomb(rest, k-1):
1134...             c.insert(0, first)
1135...             yield c
1136...         # If it doesn't contain first, it's a k comb of rest.
1137...         for c in gcomb(rest, k):
1138...             yield c
1139
1140>>> seq = list(range(1, 5))
1141>>> for k in range(len(seq) + 2):
1142...     print("%d-combs of %s:" % (k, seq))
1143...     for c in gcomb(seq, k):
1144...         print("   ", c)
11450-combs of [1, 2, 3, 4]:
1146    []
11471-combs of [1, 2, 3, 4]:
1148    [1]
1149    [2]
1150    [3]
1151    [4]
11522-combs of [1, 2, 3, 4]:
1153    [1, 2]
1154    [1, 3]
1155    [1, 4]
1156    [2, 3]
1157    [2, 4]
1158    [3, 4]
11593-combs of [1, 2, 3, 4]:
1160    [1, 2, 3]
1161    [1, 2, 4]
1162    [1, 3, 4]
1163    [2, 3, 4]
11644-combs of [1, 2, 3, 4]:
1165    [1, 2, 3, 4]
11665-combs of [1, 2, 3, 4]:
1167
1168From the Iterators list, about the types of these things.
1169
1170>>> def g():
1171...     yield 1
1172...
1173>>> type(g)
1174<class 'function'>
1175>>> i = g()
1176>>> type(i)
1177<class 'generator'>
1178>>> [s for s in dir(i) if not s.startswith('_')]
1179['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_suspended', 'gi_yieldfrom', 'send', 'throw']
1180>>> from test.support import HAVE_DOCSTRINGS
1181>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
1182Implement next(self).
1183>>> iter(i) is i
1184True
1185>>> import types
1186>>> isinstance(i, types.GeneratorType)
1187True
1188
1189And more, added later.
1190
1191>>> i.gi_running
11920
1193>>> type(i.gi_frame)
1194<class 'frame'>
1195>>> i.gi_running = 42
1196Traceback (most recent call last):
1197  ...
1198AttributeError: attribute 'gi_running' of 'generator' objects is not writable
1199>>> def g():
1200...     yield me.gi_running
1201>>> me = g()
1202>>> me.gi_running
12030
1204>>> next(me)
12051
1206>>> me.gi_running
12070
1208
1209A clever union-find implementation from c.l.py, due to David Eppstein.
1210Sent: Friday, June 29, 2001 12:16 PM
1211To: python-list@python.org
1212Subject: Re: PEP 255: Simple Generators
1213
1214>>> class disjointSet:
1215...     def __init__(self, name):
1216...         self.name = name
1217...         self.parent = None
1218...         self.generator = self.generate()
1219...
1220...     def generate(self):
1221...         while not self.parent:
1222...             yield self
1223...         for x in self.parent.generator:
1224...             yield x
1225...
1226...     def find(self):
1227...         return next(self.generator)
1228...
1229...     def union(self, parent):
1230...         if self.parent:
1231...             raise ValueError("Sorry, I'm not a root!")
1232...         self.parent = parent
1233...
1234...     def __str__(self):
1235...         return self.name
1236
1237>>> names = "ABCDEFGHIJKLM"
1238>>> sets = [disjointSet(name) for name in names]
1239>>> roots = sets[:]
1240
1241>>> import random
1242>>> gen = random.Random(42)
1243>>> while 1:
1244...     for s in sets:
1245...         print(" %s->%s" % (s, s.find()), end='')
1246...     print()
1247...     if len(roots) > 1:
1248...         s1 = gen.choice(roots)
1249...         roots.remove(s1)
1250...         s2 = gen.choice(roots)
1251...         s1.union(s2)
1252...         print("merged", s1, "into", s2)
1253...     else:
1254...         break
1255 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
1256merged K into B
1257 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
1258merged A into F
1259 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
1260merged E into F
1261 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
1262merged D into C
1263 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
1264merged M into C
1265 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
1266merged J into B
1267 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
1268merged B into C
1269 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
1270merged F into G
1271 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
1272merged L into C
1273 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
1274merged G into I
1275 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
1276merged I into H
1277 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
1278merged C into H
1279 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
1280
1281"""
1282# Emacs turd '
1283
1284# Fun tests (for sufficiently warped notions of "fun").
1285
1286fun_tests = """
1287
1288Build up to a recursive Sieve of Eratosthenes generator.
1289
1290>>> def firstn(g, n):
1291...     return [next(g) for i in range(n)]
1292
1293>>> def intsfrom(i):
1294...     while 1:
1295...         yield i
1296...         i += 1
1297
1298>>> firstn(intsfrom(5), 7)
1299[5, 6, 7, 8, 9, 10, 11]
1300
1301>>> def exclude_multiples(n, ints):
1302...     for i in ints:
1303...         if i % n:
1304...             yield i
1305
1306>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
1307[1, 2, 4, 5, 7, 8]
1308
1309>>> def sieve(ints):
1310...     prime = next(ints)
1311...     yield prime
1312...     not_divisible_by_prime = exclude_multiples(prime, ints)
1313...     for p in sieve(not_divisible_by_prime):
1314...         yield p
1315
1316>>> primes = sieve(intsfrom(2))
1317>>> firstn(primes, 20)
1318[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
1319
1320
1321Another famous problem:  generate all integers of the form
1322    2**i * 3**j  * 5**k
1323in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
1324Try writing it without generators, and correctly, and without generating
13253 internal results for each result output.
1326
1327>>> def times(n, g):
1328...     for i in g:
1329...         yield n * i
1330>>> firstn(times(10, intsfrom(1)), 10)
1331[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
1332
1333>>> def merge(g, h):
1334...     ng = next(g)
1335...     nh = next(h)
1336...     while 1:
1337...         if ng < nh:
1338...             yield ng
1339...             ng = next(g)
1340...         elif ng > nh:
1341...             yield nh
1342...             nh = next(h)
1343...         else:
1344...             yield ng
1345...             ng = next(g)
1346...             nh = next(h)
1347
1348The following works, but is doing a whale of a lot of redundant work --
1349it's not clear how to get the internal uses of m235 to share a single
1350generator.  Note that me_times2 (etc) each need to see every element in the
1351result sequence.  So this is an example where lazy lists are more natural
1352(you can look at the head of a lazy list any number of times).
1353
1354>>> def m235():
1355...     yield 1
1356...     me_times2 = times(2, m235())
1357...     me_times3 = times(3, m235())
1358...     me_times5 = times(5, m235())
1359...     for i in merge(merge(me_times2,
1360...                          me_times3),
1361...                    me_times5):
1362...         yield i
1363
1364Don't print "too many" of these -- the implementation above is extremely
1365inefficient:  each call of m235() leads to 3 recursive calls, and in
1366turn each of those 3 more, and so on, and so on, until we've descended
1367enough levels to satisfy the print stmts.  Very odd:  when I printed 5
1368lines of results below, this managed to screw up Win98's malloc in "the
1369usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
1370address space, and it *looked* like a very slow leak.
1371
1372>>> result = m235()
1373>>> for i in range(3):
1374...     print(firstn(result, 15))
1375[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1376[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1377[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1378
1379Heh.  Here's one way to get a shared list, complete with an excruciating
1380namespace renaming trick.  The *pretty* part is that the times() and merge()
1381functions can be reused as-is, because they only assume their stream
1382arguments are iterable -- a LazyList is the same as a generator to times().
1383
1384>>> class LazyList:
1385...     def __init__(self, g):
1386...         self.sofar = []
1387...         self.fetch = g.__next__
1388...
1389...     def __getitem__(self, i):
1390...         sofar, fetch = self.sofar, self.fetch
1391...         while i >= len(sofar):
1392...             sofar.append(fetch())
1393...         return sofar[i]
1394
1395>>> def m235():
1396...     yield 1
1397...     # Gack:  m235 below actually refers to a LazyList.
1398...     me_times2 = times(2, m235)
1399...     me_times3 = times(3, m235)
1400...     me_times5 = times(5, m235)
1401...     for i in merge(merge(me_times2,
1402...                          me_times3),
1403...                    me_times5):
1404...         yield i
1405
1406Print as many of these as you like -- *this* implementation is memory-
1407efficient.
1408
1409>>> m235 = LazyList(m235())
1410>>> for i in range(5):
1411...     print([m235[j] for j in range(15*i, 15*(i+1))])
1412[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1413[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1414[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1415[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1416[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1417
1418Ye olde Fibonacci generator, LazyList style.
1419
1420>>> def fibgen(a, b):
1421...
1422...     def sum(g, h):
1423...         while 1:
1424...             yield next(g) + next(h)
1425...
1426...     def tail(g):
1427...         next(g)    # throw first away
1428...         for x in g:
1429...             yield x
1430...
1431...     yield a
1432...     yield b
1433...     for s in sum(iter(fib),
1434...                  tail(iter(fib))):
1435...         yield s
1436
1437>>> fib = LazyList(fibgen(1, 2))
1438>>> firstn(iter(fib), 17)
1439[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1440
1441
1442Running after your tail with itertools.tee (new in version 2.4)
1443
1444The algorithms "m235" (Hamming) and Fibonacci presented above are both
1445examples of a whole family of FP (functional programming) algorithms
1446where a function produces and returns a list while the production algorithm
presupposes that the list has already been produced by recursively calling itself.
1448For these algorithms to work, they must:
1449
1450- produce at least a first element without presupposing the existence of
1451  the rest of the list
1452- produce their elements in a lazy manner
1453
1454To work efficiently, the beginning of the list must not be recomputed over
1455and over again. This is ensured in most FP languages as a built-in feature.
In Python, we have to explicitly maintain a list of already computed results
and abandon genuine recursion.
1458
1459This is what had been attempted above with the LazyList class. One problem
1460with that class is that it keeps a list of all of the generated results and
1461therefore continually grows. This partially defeats the goal of the generator
1462concept, viz. produce the results only as needed instead of producing them
1463all and thereby wasting memory.
1464
1465Thanks to itertools.tee, it is now clear "how to get the internal uses of
1466m235 to share a single generator".
1467
1468>>> from itertools import tee
1469>>> def m235():
1470...     def _m235():
1471...         yield 1
1472...         for n in merge(times(2, m2),
1473...                        merge(times(3, m3),
1474...                              times(5, m5))):
1475...             yield n
1476...     m1 = _m235()
1477...     m2, m3, m5, mRes = tee(m1, 4)
1478...     return mRes
1479
1480>>> it = m235()
1481>>> for i in range(5):
1482...     print(firstn(it, 15))
1483[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1484[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1485[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1486[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1487[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1488
1489The "tee" function does just what we want. It internally keeps a generated
1490result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the Hamming
sequence for hours with little or no increase in memory usage.
1493
1494The beauty of it is that recursive running-after-their-tail FP algorithms
1495are quite straightforwardly expressed with this Python idiom.
1496
1497Ye olde Fibonacci generator, tee style.
1498
1499>>> def fib():
1500...
1501...     def _isum(g, h):
1502...         while 1:
1503...             yield next(g) + next(h)
1504...
1505...     def _fib():
1506...         yield 1
1507...         yield 2
1508...         next(fibTail) # throw first away
1509...         for res in _isum(fibHead, fibTail):
1510...             yield res
1511...
1512...     realfib = _fib()
1513...     fibHead, fibTail, fibRes = tee(realfib, 3)
1514...     return fibRes
1515
1516>>> firstn(fib(), 17)
1517[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1518
1519"""
1520
1521# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
1522# hackery.
1523
1524syntax_tests = """
1525
1526These are fine:
1527
1528>>> def f():
1529...     yield 1
1530...     return
1531
1532>>> def f():
1533...     try:
1534...         yield 1
1535...     finally:
1536...         pass
1537
1538>>> def f():
1539...     try:
1540...         try:
1541...             1//0
1542...         except ZeroDivisionError:
1543...             yield 666
1544...         except:
1545...             pass
1546...     finally:
1547...         pass
1548
1549>>> def f():
1550...     try:
1551...         try:
1552...             yield 12
1553...             1//0
1554...         except ZeroDivisionError:
1555...             yield 666
1556...         except:
1557...             try:
1558...                 x = 12
1559...             finally:
1560...                 yield 12
1561...     except:
1562...         return
1563>>> list(f())
1564[12, 666]
1565
1566>>> def f():
1567...    yield
1568>>> type(f())
1569<class 'generator'>
1570
1571
1572>>> def f():
1573...    if 0:
1574...        yield
1575>>> type(f())
1576<class 'generator'>
1577
1578
1579>>> def f():
1580...     if 0:
1581...         yield 1
1582>>> type(f())
1583<class 'generator'>
1584
1585>>> def f():
1586...    if "":
1587...        yield None
1588>>> type(f())
1589<class 'generator'>
1590
1591>>> def f():
1592...     return
1593...     try:
1594...         if x==4:
1595...             pass
1596...         elif 0:
1597...             try:
1598...                 1//0
1599...             except SyntaxError:
1600...                 pass
1601...             else:
1602...                 if 0:
1603...                     while 12:
1604...                         x += 1
1605...                         yield 2 # don't blink
1606...                         f(a, b, c, d, e)
1607...         else:
1608...             pass
1609...     except:
1610...         x = 1
1611...     return
1612>>> type(f())
1613<class 'generator'>
1614
1615>>> def f():
1616...     if 0:
1617...         def g():
1618...             yield 1
1619...
1620>>> type(f())
1621<class 'NoneType'>
1622
1623>>> def f():
1624...     if 0:
1625...         class C:
1626...             def __init__(self):
1627...                 yield 1
1628...             def f(self):
1629...                 yield 2
1630>>> type(f())
1631<class 'NoneType'>
1632
1633>>> def f():
1634...     if 0:
1635...         return
1636...     if 0:
1637...         yield 2
1638>>> type(f())
1639<class 'generator'>
1640
1641This one caused a crash (see SF bug 567538):
1642
1643>>> def f():
1644...     for i in range(3):
1645...         try:
1646...             continue
1647...         finally:
1648...             yield i
1649...
1650>>> g = f()
1651>>> print(next(g))
16520
1653>>> print(next(g))
16541
1655>>> print(next(g))
16562
1657>>> print(next(g))
1658Traceback (most recent call last):
1659StopIteration
1660
1661
1662Test the gi_code attribute
1663
1664>>> def f():
1665...     yield 5
1666...
1667>>> g = f()
1668>>> g.gi_code is f.__code__
1669True
1670>>> next(g)
16715
1672>>> next(g)
1673Traceback (most recent call last):
1674StopIteration
1675>>> g.gi_code is f.__code__
1676True
1677
1678
1679Test the __name__ attribute and the repr()
1680
1681>>> def f():
1682...    yield 5
1683...
1684>>> g = f()
1685>>> g.__name__
1686'f'
1687>>> repr(g)  # doctest: +ELLIPSIS
1688'<generator object f at ...>'
1689
1690Lambdas shouldn't have their usual return behavior.
1691
1692>>> x = lambda: (yield 1)
1693>>> list(x())
1694[1]
1695
1696>>> x = lambda: ((yield 1), (yield 2))
1697>>> list(x())
1698[1, 2]
1699"""
1700
1701# conjoin is a simple backtracking generator, named in honor of Icon's
1702# "conjunction" control structure.  Pass a list of no-argument functions
1703# that return iterable objects.  Easiest to explain by example:  assume the
1704# function list [x, y, z] is passed.  Then conjoin acts like:
1705#
1706# def g():
1707#     values = [None] * 3
1708#     for values[0] in x():
1709#         for values[1] in y():
1710#             for values[2] in z():
1711#                 yield values
1712#
1713# So some 3-lists of values *may* be generated, each time we successfully
1714# get into the innermost loop.  If an iterator fails (is exhausted) before
1715# then, it "backtracks" to get the next value from the nearest enclosing
1716# iterator (the one "to the left"), and starts all over again at the next
1717# slot (pumps a fresh iterator).  Of course this is most useful when the
1718# iterators have side-effects, so that which values *can* be generated at
1719# each slot depend on the values iterated at previous slots.
1720
def simple_conjoin(gs):
    """Backtracking cross-product generator (clear but slow version).

    `gs` is a list of no-argument callables, each returning an iterable.
    Acts like len(gs) nested for-loops: the shared `values` list is
    yielded once per element of the cross product, with values[i] holding
    the current item from gs[i]().  Callers must copy the list if they
    want a snapshot, since the same object is re-yielded every time.
    """

    values = [None] * len(gs)

    def descend(level):
        if level >= len(gs):
            # Every slot is filled: emit one complete combination.
            yield values
        else:
            # Plug each item from this slot's iterable into values[level],
            # then recurse to fill the remaining slots.
            for values[level] in gs[level]():
                yield from descend(level + 1)

    yield from descend(0)
1735
1736# That works fine, but recursing a level and checking i against len(gs) for
1737# each item produced is inefficient.  By doing manual loop unrolling across
1738# generator boundaries, it's possible to eliminate most of that overhead.
1739# This isn't worth the bother *in general* for generators, but conjoin() is
1740# a core building block for some CPU-intensive generator applications.
1741
def conjoin(gs):
    """Backtracking cross-product generator, manually unrolled for speed.

    Same contract as simple_conjoin(): `gs` is a list of no-argument
    callables returning iterables, and the shared `values` list is
    yielded once per element of the cross product.  Recursion overhead
    is cut by peeling off three loop nests at a time in _gen3().
    """

    n = len(gs)
    values = [None] * n

    # Do one loop nest at time recursively, until the # of loop nests
    # remaining is divisible by 3.

    def gen(i):
        if i >= n:
            yield values

        elif (n-i) % 3:
            ip1 = i+1
            for values[i] in gs[i]():
                for x in gen(ip1):
                    yield x

        else:
            for x in _gen3(i):
                yield x

    # Do three loop nests at a time, recursing only if at least three more
    # remain.  Don't call directly:  this is an internal optimization for
    # gen's use.

    def _gen3(i):
        assert i < n and (n-i) % 3 == 0
        ip1, ip2, ip3 = i+1, i+2, i+3
        g, g1, g2 = gs[i : ip3]

        if ip3 >= n:
            # These are the last three, so we can yield values directly.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        yield values

        else:
            # At least 6 loop nests remain; peel off 3 and recurse for the
            # rest.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        for x in _gen3(ip3):
                            yield x

    for x in gen(0):
        yield x
1791
1792# And one more approach:  For backtracking apps like the Knight's Tour
1793# solver below, the number of backtracking levels can be enormous (one
1794# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1795# needs 10,000 levels).  In such cases Python is likely to run out of
1796# stack space due to recursion.  So here's a recursion-free version of
1797# conjoin too.
1798# NOTE WELL:  This allows large problems to be solved with only trivial
1799# demands on stack space.  Without explicitly resumable generators, this is
1800# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1801# than the fancy unrolled recursive conjoin.
1802
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Non-recursive conjoin(): same contract, trivial stack usage.

    Keeps an explicit array of suspended iterators (`iters`) and walks an
    index `i` up (descend) and down (backtrack) instead of recursing, so
    boards with tens of thousands of backtracking levels don't exhaust
    the Python call stack.
    """
    n = len(gs)
    values = [None] * n
    iters  = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend.
        try:
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # while-else: no iterator could be resumed, so we're done.
            assert i < 0
            break
1836
1837# A conjoin-based N-Queens solver.
1838
class Queens:
    """N-Queens solver: generate all placements via conjoin()."""

    def __init__(self, n):
        self.n = n

        # Assign a unique bit to each column and diagonal:
        #   columns:         n bits, bit j for column j.
        #   NW-SE diagonals: 2n-1 bits; i-j is invariant along each one,
        #                    shifted by n-1 so the smallest (1-n) maps to 0.
        #   NE-SW diagonals: 2n-1 bits; i+j is invariant along each one.
        # A square's mask ORs the three lines it covers; self.used tracks
        # the lines covered by queens already placed.
        self.rowgenerators = []
        for i in range(n):
            row_masks = [(1 << j) |                  # column ordinal
                         (1 << (n + i-j + n-1)) |    # NW-SE ordinal
                         (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
                         for j in range(n)]

            def rowgen(masks=row_masks):
                # Yield each column in this row that no earlier queen
                # attacks, claiming its lines while suspended and
                # releasing them on resumption (backtracking).
                for col, mask in enumerate(masks):
                    if not (mask & self.used):
                        self.used |= mask
                        yield col
                        self.used &= ~mask

            self.rowgenerators.append(rowgen)

    def solve(self):
        """Generate solutions; each is a shared row->column list."""
        self.used = 0
        yield from conjoin(self.rowgenerators)

    def printsolution(self, row2col):
        """Pretty-print one solution as an ASCII board."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            cells = ["Q" if row2col[i] == j else " " for j in range(n)]
            print("|" + "|".join(cells) + "|")
            print(sep)
1888
1889# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1890# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1891# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1892# creating 10s of thousands of generators then!), and is lengthy.
1893
class Knights:
    """Closed Knight's Tour solver driven by conjoin().

    Builds one backtracking generator per move (m*n of them); feeding the
    list to conjoin() (or flat_conjoin()) enumerates complete tours that
    return to the starting corner.  Pass hard=1 to use a stronger (but
    more expensive) move-ordering heuristic.
    """

    def __init__(self, m, n, hard=0):
        # Board is m rows by n columns; squares are flat indices
        # (see coords2index/index2coords).
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 through m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 through m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            # `hard and advance_hard or advance` is the pre-ternary idiom
            # for `advance_hard if hard else advance`.
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map 0-based (row, col) to a flat square index."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Map a flat square index back to (row, col)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs: the legal knight moves from every square."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        # The eight knight-move offsets, clockwise from (1, 2).
        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Generate closed tours; each is a shared list of square indices."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Pretty-print one tour: each square shows its move number."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
            print(sep)
2084
# Doctests exercising conjoin(): cross-products, result-list reuse, and the
# Queens/Knights backtracking drivers.  Collected via the __test__ dict below.
conjoin_tests = """

Generate the 3-bit binary numbers in order.  This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.

>>> for c in conjoin([lambda: iter((0, 1))] * 3):
...     print(c)
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]

For efficiency in typical backtracking apps, conjoin() yields the same list
object each time.  So if you want to save away a full account of its
generated sequence, you need to copy its results.

>>> def gencopy(iterator):
...     for x in iterator:
...         yield x[:]

>>> for n in range(10):
...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True

And run an 8-queens solver.

>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
...     count += 1
...     if count <= LIMIT:
...         print("Solution", count)
...         q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+

>>> print(count, "solutions in all.")
92 solutions in all.

And run a Knight's Tour on a 10x10 board.  Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.

>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
...     count += 1
...     if count <= LIMIT:
...         print("Solution", count)
...         k.printsolution(x)
...     else:
...         break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""
2231
# Doctests verifying that generator functions and generator-iterators both
# support weak references and proxies.
weakref_tests = """\
Generators are weakly referencable:

>>> import weakref
>>> def gen():
...     yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)

Generator-iterators are weakly referencable as well:

>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']

"""
2255
# Doctests for PEP 342 coroutine behavior: send()/throw()/close() semantics,
# GeneratorExit handling, finalization, and the yield-expression syntax rules.
coroutine_tests = """\
>>> from test.support import gc_collect

Sending a value into a started generator:

>>> def f():
...     print((yield 1))
...     yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2

Sending a value into a new generator produces a TypeError:

>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator


Yield by itself yields None:

>>> def f(): yield
>>> list(f())
[None]


Yield is allowed only in the outermost iterable in generator expression:

>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>


A yield expression with augmented assignment.

>>> def coroutine(seq):
...     count = 0
...     while count < 200:
...         count += yield
...         seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]


Check some syntax errors for yield expressions:

>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
  ...
SyntaxError: 'yield' outside function

>>> f=lambda: (yield from (1,2)), (yield from (3,4))
Traceback (most recent call last):
  ...
SyntaxError: 'yield from' outside function

>>> yield from [1,2]
Traceback (most recent call last):
  ...
SyntaxError: 'yield from' outside function

>>> def f(): x = yield = y
Traceback (most recent call last):
  ...
SyntaxError: assignment to yield expression not possible

>>> def f(): (yield bar) = y
Traceback (most recent call last):
  ...
SyntaxError: cannot assign to yield expression here. Maybe you meant '==' instead of '='?

>>> def f(): (yield bar) += y
Traceback (most recent call last):
  ...
SyntaxError: 'yield expression' is an illegal expression for augmented assignment


Now check some throw() conditions:

>>> def f():
...     while True:
...         try:
...             print((yield))
...         except ValueError as v:
...             print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)

>>> g.throw(ValueError) # type only
caught ValueError ()

>>> g.throw(ValueError("xyz"))  # value only
caught ValueError (xyz)

>>> import warnings
>>> old_filters = warnings.filters.copy()
>>> warnings.filterwarnings("ignore", category=DeprecationWarning)

# Filter DeprecationWarning: regarding the (type, val, tb) signature of throw().
# Deprecation warnings are re-enabled below.

>>> g.throw(ValueError, ValueError(1))   # value+matching type
caught ValueError (1)

>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
caught ValueError (1)

>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
caught ValueError (1)

>>> g.throw(ValueError(1), "foo")       # bad args
Traceback (most recent call last):
  ...
TypeError: instance exception may not have a separate value

>>> g.throw(ValueError, "foo", 23)      # bad args
Traceback (most recent call last):
  ...
TypeError: throw() third argument must be a traceback object

>>> g.throw("abc")
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not str

>>> g.throw(0)
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not int

>>> g.throw(list)
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not type

>>> def throw(g,exc):
...     try:
...         raise exc
...     except:
...         g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()

>>> g.send(1)
1

>>> throw(g,TypeError)  # terminate the generator
Traceback (most recent call last):
  ...
TypeError

>>> print(g.gi_frame)
None

>>> g.send(2)
Traceback (most recent call last):
  ...
StopIteration

>>> g.throw(ValueError,6)       # throw on closed generator
Traceback (most recent call last):
  ...
ValueError: 6

>>> f().throw(ValueError,7)     # throw on just-opened generator
Traceback (most recent call last):
  ...
ValueError: 7

>>> warnings.filters[:] = old_filters

# Re-enable DeprecationWarning: the (type, val, tb) exception representation is deprecated,
#                               and may be removed in a future version of Python.

Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0

>>> def f():
...     try:
...         yield
...     except:
...         raise
>>> g = f()
>>> try:
...     1/0
... except ZeroDivisionError as v:
...     try:
...         g.throw(v)
...     except Exception as w:
...         tb = w.__traceback__
>>> levels = 0
>>> while tb:
...     levels += 1
...     tb = tb.tb_next
>>> levels
3

Now let's try closing a generator:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close()  # should be no-op now

>>> f().close()  # close on just-opened generator should be fine

>>> def f(): yield      # an even simpler generator
>>> f().close()         # close before opening
>>> g = f()
>>> next(g)
>>> g.close()           # close normally

And finalization:

>>> def f():
...     try: yield
...     finally:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> del g; gc_collect()  # For PyPy or other GCs.
exiting


GeneratorExit is not caught by except Exception:

>>> def f():
...     try: yield
...     except Exception:
...         print('except')
...     finally:
...         print('finally')

>>> g = f()
>>> next(g)
>>> del g; gc_collect()  # For PyPy or other GCs.
finally


Now let's try some ill-behaved generators:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
  ...
RuntimeError: generator ignored GeneratorExit
>>> g.close()


Our ill-behaved code should be invoked during GC:

>>> with support.catch_unraisable_exception() as cm:
...     g = f()
...     next(g)
...     del g
...
...     cm.unraisable.exc_type == RuntimeError
...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
...     cm.unraisable.exc_traceback is not None
True
True
True

And errors thrown during closing should propagate:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
  ...
TypeError: fie!


Ensure that various yield expression constructs make their
enclosing function a generator:

>>> def f(): x += yield
>>> type(f())
<class 'generator'>

>>> def f(): x = yield
>>> type(f())
<class 'generator'>

>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>

>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]

"""
2599
# Doctests pinned as regression tests for historical reference leaks
# (itertools.tee cycles, generator finalization cycles, exceptions raised
# from __del__).
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> from test import support
>>> class Leaker:
...     def __del__(self):
...         def invoke(message):
...             raise RuntimeError(message)
...         invoke("del failed")
...
>>> with support.catch_unraisable_exception() as cm:
...     l = Leaker()
...     del l
...
...     cm.unraisable.object == Leaker.__del__
...     cm.unraisable.exc_type == RuntimeError
...     str(cm.unraisable.exc_value) == "del failed"
...     cm.unraisable.exc_traceback is not None
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""
2671
# Standard doctest __test__ protocol: maps collection names to the big
# doctest strings defined in this module, so doctest.DocTestSuite() runs
# them all.  (tutorial_tests, pep_tests, etc. are defined earlier in the
# file.)
__test__ = {"tut":      tutorial_tests,
            "pep":      pep_tests,
            "email":    email_tests,
            "fun":      fun_tests,
            "syntax":   syntax_tests,
            "conjoin":  conjoin_tests,
            "weakref":  weakref_tests,
            "coroutine":  coroutine_tests,
            "refleaks": refleaks_tests,
            }
2682
def load_tests(loader, tests, pattern):
    """unittest load_tests hook: append this module's doctests to the suite."""
    doctests = doctest.DocTestSuite()
    tests.addTest(doctests)
    return tests
2686
2687
# Running the file directly goes through unittest, whose discovery honors
# the load_tests() hook defined above (and hence runs the doctests too).
if __name__ == "__main__":
    unittest.main()
2690